{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.974690994702766,
  "eval_steps": 500,
  "global_step": 1272,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.004708652148322542, "grad_norm": 5.121918678283691, "learning_rate": 7.8125e-08, "loss": 0.9126, "step": 1 },
    { "epoch": 0.009417304296645085, "grad_norm": 5.174776077270508, "learning_rate": 1.5625e-07, "loss": 0.9156, "step": 2 },
    { "epoch": 0.014125956444967627, "grad_norm": 5.343864440917969, "learning_rate": 2.3437500000000003e-07, "loss": 0.9331, "step": 3 },
    { "epoch": 0.01883460859329017, "grad_norm": 5.055787563323975, "learning_rate": 3.125e-07, "loss": 0.9206, "step": 4 },
    { "epoch": 0.023543260741612712, "grad_norm": 5.8190765380859375, "learning_rate": 3.90625e-07, "loss": 1.0045, "step": 5 },
    { "epoch": 0.028251912889935255, "grad_norm": 5.174574375152588, "learning_rate": 4.6875000000000006e-07, "loss": 0.9213, "step": 6 },
    { "epoch": 0.0329605650382578, "grad_norm": 5.184887886047363, "learning_rate": 5.468750000000001e-07, "loss": 0.9327, "step": 7 },
    { "epoch": 0.03766921718658034, "grad_norm": 5.018477439880371, "learning_rate": 6.25e-07, "loss": 0.8872, "step": 8 },
    { "epoch": 0.04237786933490288, "grad_norm": 4.189849853515625, "learning_rate": 7.03125e-07, "loss": 0.8224, "step": 9 },
    { "epoch": 0.047086521483225424, "grad_norm": 4.744853973388672, "learning_rate": 7.8125e-07, "loss": 0.9135, "step": 10 },
    { "epoch": 0.05179517363154797, "grad_norm": 4.5790886878967285, "learning_rate": 8.59375e-07, "loss": 0.9163, "step": 11 },
    { "epoch": 0.05650382577987051, "grad_norm": 3.080500364303589, "learning_rate": 9.375000000000001e-07, "loss": 0.8982, "step": 12 },
    { "epoch": 0.06121247792819305, "grad_norm": 3.0505118370056152, "learning_rate": 1.0156250000000001e-06, "loss": 0.8778, "step": 13 },
    { "epoch": 0.0659211300765156, "grad_norm": 3.0311391353607178, "learning_rate": 1.0937500000000001e-06, "loss": 0.8865, "step": 14 },
    { "epoch": 0.07062978222483814, "grad_norm": 2.7465591430664062, "learning_rate": 1.1718750000000001e-06, "loss": 0.8397, "step": 15 },
    { "epoch": 0.07533843437316068, "grad_norm": 2.4795093536376953, "learning_rate": 1.25e-06, "loss": 0.8057, "step": 16 },
    { "epoch": 0.08004708652148322, "grad_norm": 2.3555572032928467, "learning_rate": 1.328125e-06, "loss": 0.7827, "step": 17 },
    { "epoch": 0.08475573866980576, "grad_norm": 2.6421377658843994, "learning_rate": 1.40625e-06, "loss": 0.8337, "step": 18 },
    { "epoch": 0.0894643908181283, "grad_norm": 2.405103921890259, "learning_rate": 1.484375e-06, "loss": 0.7903, "step": 19 },
    { "epoch": 0.09417304296645085, "grad_norm": 2.2684788703918457, "learning_rate": 1.5625e-06, "loss": 0.7974, "step": 20 },
    { "epoch": 0.09888169511477339, "grad_norm": 2.151175022125244, "learning_rate": 1.640625e-06, "loss": 0.7783, "step": 21 },
    { "epoch": 0.10359034726309593, "grad_norm": 2.6126651763916016, "learning_rate": 1.71875e-06, "loss": 0.7367, "step": 22 },
    { "epoch": 0.10829899941141848, "grad_norm": 2.518990993499756, "learning_rate": 1.796875e-06, "loss": 0.7653, "step": 23 },
    { "epoch": 0.11300765155974102, "grad_norm": 1.8703638315200806, "learning_rate": 1.8750000000000003e-06, "loss": 0.7007, "step": 24 },
    { "epoch": 0.11771630370806356, "grad_norm": 2.162489652633667, "learning_rate": 1.953125e-06, "loss": 0.7049, "step": 25 },
    { "epoch": 0.1224249558563861, "grad_norm": 2.2163379192352295, "learning_rate": 2.0312500000000002e-06, "loss": 0.7253, "step": 26 },
    { "epoch": 0.12713360800470866, "grad_norm": 1.8689916133880615, "learning_rate": 2.109375e-06, "loss": 0.7046, "step": 27 },
    { "epoch": 0.1318422601530312, "grad_norm": 1.7359975576400757, "learning_rate": 2.1875000000000002e-06, "loss": 0.6888, "step": 28 },
    { "epoch": 0.13655091230135374, "grad_norm": 1.7069369554519653, "learning_rate": 2.265625e-06, "loss": 0.6856, "step": 29 },
    { "epoch": 0.14125956444967627, "grad_norm": 1.7160871028900146, "learning_rate": 2.3437500000000002e-06, "loss": 0.6859, "step": 30 },
    { "epoch": 0.14596821659799883, "grad_norm": 1.6506415605545044, "learning_rate": 2.421875e-06, "loss": 0.6525, "step": 31 },
    { "epoch": 0.15067686874632136, "grad_norm": 1.59455406665802, "learning_rate": 2.5e-06, "loss": 0.6459, "step": 32 },
    { "epoch": 0.1553855208946439, "grad_norm": 1.5882025957107544, "learning_rate": 2.5781250000000004e-06, "loss": 0.6538, "step": 33 },
    { "epoch": 0.16009417304296644, "grad_norm": 1.4623929262161255, "learning_rate": 2.65625e-06, "loss": 0.6292, "step": 34 },
    { "epoch": 0.164802825191289, "grad_norm": 1.6212574243545532, "learning_rate": 2.7343750000000004e-06, "loss": 0.6548, "step": 35 },
    { "epoch": 0.16951147733961153, "grad_norm": 1.4372034072875977, "learning_rate": 2.8125e-06, "loss": 0.6276, "step": 36 },
    { "epoch": 0.17422012948793408, "grad_norm": 1.4232583045959473, "learning_rate": 2.8906250000000004e-06, "loss": 0.6112, "step": 37 },
    { "epoch": 0.1789287816362566, "grad_norm": 1.3985645771026611, "learning_rate": 2.96875e-06, "loss": 0.6117, "step": 38 },
    { "epoch": 0.18363743378457917, "grad_norm": 1.4189324378967285, "learning_rate": 3.0468750000000004e-06, "loss": 0.6046, "step": 39 },
    { "epoch": 0.1883460859329017, "grad_norm": 1.5376724004745483, "learning_rate": 3.125e-06, "loss": 0.6116, "step": 40 },
    { "epoch": 0.19305473808122425, "grad_norm": 1.7375065088272095, "learning_rate": 3.2031250000000004e-06, "loss": 0.6435, "step": 41 },
    { "epoch": 0.19776339022954678, "grad_norm": 1.5353353023529053, "learning_rate": 3.28125e-06, "loss": 0.576, "step": 42 },
    { "epoch": 0.20247204237786934, "grad_norm": 1.706644058227539, "learning_rate": 3.3593750000000003e-06, "loss": 0.581, "step": 43 },
    { "epoch": 0.20718069452619187, "grad_norm": 1.9466679096221924, "learning_rate": 3.4375e-06, "loss": 0.622, "step": 44 },
    { "epoch": 0.21188934667451442, "grad_norm": 2.004645347595215, "learning_rate": 3.5156250000000003e-06, "loss": 0.6254, "step": 45 },
    { "epoch": 0.21659799882283695, "grad_norm": 2.015970468521118, "learning_rate": 3.59375e-06, "loss": 0.6257, "step": 46 },
    { "epoch": 0.2213066509711595, "grad_norm": 1.8542283773422241, "learning_rate": 3.6718750000000003e-06, "loss": 0.5857, "step": 47 },
    { "epoch": 0.22601530311948204, "grad_norm": 1.8879379034042358, "learning_rate": 3.7500000000000005e-06, "loss": 0.5901, "step": 48 },
    { "epoch": 0.2307239552678046, "grad_norm": 2.0544469356536865, "learning_rate": 3.828125000000001e-06, "loss": 0.6142, "step": 49 },
    { "epoch": 0.23543260741612712, "grad_norm": 1.9305391311645508, "learning_rate": 3.90625e-06, "loss": 0.5927, "step": 50 },
    { "epoch": 0.24014125956444968, "grad_norm": 2.0514976978302, "learning_rate": 3.984375e-06, "loss": 0.6225, "step": 51 },
    { "epoch": 0.2448499117127722, "grad_norm": 1.771240472793579, "learning_rate": 4.0625000000000005e-06, "loss": 0.5577, "step": 52 },
    { "epoch": 0.24955856386109476, "grad_norm": 1.7672276496887207, "learning_rate": 4.140625000000001e-06, "loss": 0.5625, "step": 53 },
    { "epoch": 0.2542672160094173, "grad_norm": 1.8797359466552734, "learning_rate": 4.21875e-06, "loss": 0.5839, "step": 54 },
    { "epoch": 0.2589758681577399, "grad_norm": 1.905333399772644, "learning_rate": 4.296875e-06, "loss": 0.5837, "step": 55 },
    { "epoch": 0.2636845203060624, "grad_norm": 2.143460273742676, "learning_rate": 4.3750000000000005e-06, "loss": 0.6485, "step": 56 },
    { "epoch": 0.26839317245438493, "grad_norm": 1.9987819194793701, "learning_rate": 4.453125000000001e-06, "loss": 0.591, "step": 57 },
    { "epoch": 0.2731018246027075, "grad_norm": 2.0481247901916504, "learning_rate": 4.53125e-06, "loss": 0.5949, "step": 58 },
    { "epoch": 0.27781047675103004, "grad_norm": 1.8108389377593994, "learning_rate": 4.609375e-06, "loss": 0.5264, "step": 59 },
    { "epoch": 0.28251912889935255, "grad_norm": 1.840555191040039, "learning_rate": 4.6875000000000004e-06, "loss": 0.5586, "step": 60 },
    { "epoch": 0.2872277810476751, "grad_norm": 1.7868009805679321, "learning_rate": 4.765625000000001e-06, "loss": 0.5445, "step": 61 },
    { "epoch": 0.29193643319599766, "grad_norm": 1.8135056495666504, "learning_rate": 4.84375e-06, "loss": 0.5522, "step": 62 },
    { "epoch": 0.2966450853443202, "grad_norm": 1.8280186653137207, "learning_rate": 4.921875e-06, "loss": 0.5493, "step": 63 },
    { "epoch": 0.3013537374926427, "grad_norm": 1.9932650327682495, "learning_rate": 5e-06, "loss": 0.5866, "step": 64 },
    { "epoch": 0.30606238964096527, "grad_norm": 1.926773190498352, "learning_rate": 4.9999915457391215e-06, "loss": 0.5705, "step": 65 },
    { "epoch": 0.3107710417892878, "grad_norm": 1.8135457038879395, "learning_rate": 4.999966183013663e-06, "loss": 0.5475, "step": 66 },
    { "epoch": 0.3154796939376104, "grad_norm": 1.7810639142990112, "learning_rate": 4.999923911995162e-06, "loss": 0.5442, "step": 67 },
    { "epoch": 0.3201883460859329, "grad_norm": 1.7810635566711426, "learning_rate": 4.999864732969518e-06, "loss": 0.5468, "step": 68 },
    { "epoch": 0.32489699823425544, "grad_norm": 1.7482165098190308, "learning_rate": 4.9997886463369815e-06, "loss": 0.5292, "step": 69 },
    { "epoch": 0.329605650382578, "grad_norm": 1.8129892349243164, "learning_rate": 4.999695652612156e-06, "loss": 0.5645, "step": 70 },
    { "epoch": 0.33431430253090055, "grad_norm": 1.8132847547531128, "learning_rate": 4.999585752423999e-06, "loss": 0.5458, "step": 71 },
    { "epoch": 0.33902295467922305, "grad_norm": 1.8230698108673096, "learning_rate": 4.999458946515808e-06, "loss": 0.5455, "step": 72 },
    { "epoch": 0.3437316068275456, "grad_norm": 1.8087327480316162, "learning_rate": 4.999315235745224e-06, "loss": 0.5467, "step": 73 },
    { "epoch": 0.34844025897586817, "grad_norm": 1.8371257781982422, "learning_rate": 4.999154621084221e-06, "loss": 0.559, "step": 74 },
    { "epoch": 0.3531489111241907, "grad_norm": 1.7116756439208984, "learning_rate": 4.998977103619103e-06, "loss": 0.531, "step": 75 },
    { "epoch": 0.3578575632725132, "grad_norm": 1.8413268327713013, "learning_rate": 4.9987826845504916e-06, "loss": 0.5471, "step": 76 },
    { "epoch": 0.3625662154208358, "grad_norm": 1.7932018041610718, "learning_rate": 4.9985713651933235e-06, "loss": 0.5405, "step": 77 },
    { "epoch": 0.36727486756915834, "grad_norm": 1.739492654800415, "learning_rate": 4.998343146976837e-06, "loss": 0.5233, "step": 78 },
    { "epoch": 0.3719835197174809, "grad_norm": 1.6671689748764038, "learning_rate": 4.998098031444567e-06, "loss": 0.5129, "step": 79 },
    { "epoch": 0.3766921718658034, "grad_norm": 1.7220134735107422, "learning_rate": 4.997836020254328e-06, "loss": 0.5167, "step": 80 },
    { "epoch": 0.38140082401412595, "grad_norm": 1.743154764175415, "learning_rate": 4.99755711517821e-06, "loss": 0.5116, "step": 81 },
    { "epoch": 0.3861094761624485, "grad_norm": 1.5877171754837036, "learning_rate": 4.9972613181025605e-06, "loss": 0.5058, "step": 82 },
    { "epoch": 0.39081812831077106, "grad_norm": 1.6139500141143799, "learning_rate": 4.996948631027978e-06, "loss": 0.523, "step": 83 },
    { "epoch": 0.39552678045909356, "grad_norm": 1.6250971555709839, "learning_rate": 4.996619056069292e-06, "loss": 0.5277, "step": 84 },
    { "epoch": 0.4002354326074161, "grad_norm": 1.4910732507705688, "learning_rate": 4.996272595455553e-06, "loss": 0.4913, "step": 85 },
    { "epoch": 0.4049440847557387, "grad_norm": 1.590549349784851, "learning_rate": 4.995909251530014e-06, "loss": 0.5074, "step": 86 },
    { "epoch": 0.40965273690406123, "grad_norm": 1.728352665901184, "learning_rate": 4.995529026750121e-06, "loss": 0.5258, "step": 87 },
    { "epoch": 0.41436138905238373, "grad_norm": 1.5192519426345825, "learning_rate": 4.995131923687488e-06, "loss": 0.5165, "step": 88 },
    { "epoch": 0.4190700412007063, "grad_norm": 1.5756913423538208, "learning_rate": 4.994717945027886e-06, "loss": 0.5299, "step": 89 },
    { "epoch": 0.42377869334902885, "grad_norm": 1.5243970155715942, "learning_rate": 4.9942870935712215e-06, "loss": 0.5292, "step": 90 },
    { "epoch": 0.4284873454973514, "grad_norm": 1.583005428314209, "learning_rate": 4.993839372231519e-06, "loss": 0.5074, "step": 91 },
    { "epoch": 0.4331959976456739, "grad_norm": 1.5291098356246948, "learning_rate": 4.993374784036902e-06, "loss": 0.523, "step": 92 },
    { "epoch": 0.43790464979399646, "grad_norm": 1.4610439538955688, "learning_rate": 4.992893332129568e-06, "loss": 0.4949, "step": 93 },
    { "epoch": 0.442613301942319, "grad_norm": 1.5078333616256714, "learning_rate": 4.992395019765776e-06, "loss": 0.5043, "step": 94 },
    { "epoch": 0.44732195409064157, "grad_norm": 1.5027759075164795, "learning_rate": 4.991879850315813e-06, "loss": 0.5022, "step": 95 },
    { "epoch": 0.45203060623896407, "grad_norm": 1.3885493278503418, "learning_rate": 4.991347827263983e-06, "loss": 0.4761, "step": 96 },
    { "epoch": 0.45673925838728663, "grad_norm": 1.3093187808990479, "learning_rate": 4.990798954208574e-06, "loss": 0.4884, "step": 97 },
    { "epoch": 0.4614479105356092, "grad_norm": 1.391524314880371, "learning_rate": 4.99023323486184e-06, "loss": 0.5107, "step": 98 },
    { "epoch": 0.46615656268393174, "grad_norm": 1.3108298778533936, "learning_rate": 4.989650673049971e-06, "loss": 0.5018, "step": 99 },
    { "epoch": 0.47086521483225424, "grad_norm": 1.3510005474090576, "learning_rate": 4.98905127271307e-06, "loss": 0.5027, "step": 100 },
    { "epoch": 0.4755738669805768, "grad_norm": 1.1885933876037598, "learning_rate": 4.988435037905129e-06, "loss": 0.4626, "step": 101 },
    { "epoch": 0.48028251912889935, "grad_norm": 1.1308561563491821, "learning_rate": 4.987801972793993e-06, "loss": 0.4846, "step": 102 },
    { "epoch": 0.4849911712772219, "grad_norm": 1.134296178817749, "learning_rate": 4.987152081661343e-06, "loss": 0.5007, "step": 103 },
    { "epoch": 0.4896998234255444, "grad_norm": 1.1893833875656128, "learning_rate": 4.986485368902656e-06, "loss": 0.5055, "step": 104 },
    { "epoch": 0.49440847557386697, "grad_norm": 1.007875919342041, "learning_rate": 4.985801839027183e-06, "loss": 0.4775, "step": 105 },
    { "epoch": 0.4991171277221895, "grad_norm": 1.2207539081573486, "learning_rate": 4.985101496657918e-06, "loss": 0.5006, "step": 106 },
    { "epoch": 0.503825779870512, "grad_norm": 1.033278465270996, "learning_rate": 4.9843843465315604e-06, "loss": 0.4911, "step": 107 },
    { "epoch": 0.5085344320188346, "grad_norm": 1.0263997316360474, "learning_rate": 4.98365039349849e-06, "loss": 0.4772, "step": 108 },
    { "epoch": 0.5132430841671571, "grad_norm": 0.9356820583343506, "learning_rate": 4.982899642522732e-06, "loss": 0.4779, "step": 109 },
    { "epoch": 0.5179517363154797, "grad_norm": 0.8354411721229553, "learning_rate": 4.982132098681923e-06, "loss": 0.4846, "step": 110 },
    { "epoch": 0.5226603884638023, "grad_norm": 0.8493514657020569, "learning_rate": 4.981347767167273e-06, "loss": 0.5135, "step": 111 },
    { "epoch": 0.5273690406121248, "grad_norm": 0.980951726436615, "learning_rate": 4.980546653283538e-06, "loss": 0.4827, "step": 112 },
    { "epoch": 0.5320776927604474, "grad_norm": 0.6815721392631531, "learning_rate": 4.979728762448979e-06, "loss": 0.4508, "step": 113 },
    { "epoch": 0.5367863449087699, "grad_norm": 0.7471316456794739, "learning_rate": 4.978894100195325e-06, "loss": 0.5024, "step": 114 },
    { "epoch": 0.5414949970570924, "grad_norm": 0.6778037548065186, "learning_rate": 4.978042672167739e-06, "loss": 0.4927, "step": 115 },
    { "epoch": 0.546203649205415, "grad_norm": 0.8497841954231262, "learning_rate": 4.977174484124776e-06, "loss": 0.4908, "step": 116 },
    { "epoch": 0.5509123013537375, "grad_norm": 0.6059902310371399, "learning_rate": 4.9762895419383475e-06, "loss": 0.4703, "step": 117 },
    { "epoch": 0.5556209535020601, "grad_norm": 0.6304749250411987, "learning_rate": 4.975387851593677e-06, "loss": 0.5208, "step": 118 },
    { "epoch": 0.5603296056503826, "grad_norm": 0.6655656099319458, "learning_rate": 4.9744694191892675e-06, "loss": 0.5069, "step": 119 },
    { "epoch": 0.5650382577987051, "grad_norm": 0.6231979131698608, "learning_rate": 4.973534250936851e-06, "loss": 0.4722, "step": 120 },
    { "epoch": 0.5697469099470277, "grad_norm": 0.5612891912460327, "learning_rate": 4.972582353161354e-06, "loss": 0.4688, "step": 121 },
    { "epoch": 0.5744555620953502, "grad_norm": 0.4817934036254883, "learning_rate": 4.9716137323008485e-06, "loss": 0.457, "step": 122 },
    { "epoch": 0.5791642142436727, "grad_norm": 0.4845065176486969, "learning_rate": 4.970628394906514e-06, "loss": 0.4719, "step": 123 },
    { "epoch": 0.5838728663919953, "grad_norm": 0.5664409399032593, "learning_rate": 4.96962634764259e-06, "loss": 0.4985, "step": 124 },
    { "epoch": 0.5885815185403178, "grad_norm": 0.4690743386745453, "learning_rate": 4.968607597286332e-06, "loss": 0.4759, "step": 125 },
    { "epoch": 0.5932901706886404, "grad_norm": 0.4690413773059845, "learning_rate": 4.967572150727965e-06, "loss": 0.4837, "step": 126 },
    { "epoch": 0.5979988228369629, "grad_norm": 0.4350973069667816, "learning_rate": 4.966520014970637e-06, "loss": 0.4679, "step": 127 },
    { "epoch": 0.6027074749852854, "grad_norm": 0.4283631145954132, "learning_rate": 4.965451197130373e-06, "loss": 0.4495, "step": 128 },
    { "epoch": 0.607416127133608, "grad_norm": 0.5446098446846008, "learning_rate": 4.964365704436023e-06, "loss": 0.4868, "step": 129 },
    { "epoch": 0.6121247792819305, "grad_norm": 0.4670584797859192, "learning_rate": 4.963263544229219e-06, "loss": 0.4717, "step": 130 },
    { "epoch": 0.616833431430253, "grad_norm": 0.4510785639286041, "learning_rate": 4.962144723964322e-06, "loss": 0.4777, "step": 131 },
    { "epoch": 0.6215420835785757, "grad_norm": 0.42210468649864197, "learning_rate": 4.961009251208368e-06, "loss": 0.452, "step": 132 },
    { "epoch": 0.6262507357268982, "grad_norm": 0.42281368374824524, "learning_rate": 4.959857133641025e-06, "loss": 0.4547, "step": 133 },
    { "epoch": 0.6309593878752208, "grad_norm": 0.5155296325683594, "learning_rate": 4.9586883790545345e-06, "loss": 0.4736, "step": 134 },
    { "epoch": 0.6356680400235433, "grad_norm": 0.453692227602005, "learning_rate": 4.957502995353663e-06, "loss": 0.4761, "step": 135 },
    { "epoch": 0.6403766921718658, "grad_norm": 0.4068131744861603, "learning_rate": 4.956300990555643e-06, "loss": 0.4593, "step": 136 },
    { "epoch": 0.6450853443201884, "grad_norm": 0.43510329723358154, "learning_rate": 4.955082372790124e-06, "loss": 0.4581, "step": 137 },
    { "epoch": 0.6497939964685109, "grad_norm": 0.44176122546195984, "learning_rate": 4.953847150299119e-06, "loss": 0.4584, "step": 138 },
    { "epoch": 0.6545026486168334, "grad_norm": 0.47031915187835693, "learning_rate": 4.952595331436939e-06, "loss": 0.4646, "step": 139 },
    { "epoch": 0.659211300765156, "grad_norm": 0.4248315989971161, "learning_rate": 4.951326924670148e-06, "loss": 0.4636, "step": 140 },
    { "epoch": 0.6639199529134785, "grad_norm": 0.40988391637802124, "learning_rate": 4.9500419385775e-06, "loss": 0.4442, "step": 141 },
    { "epoch": 0.6686286050618011, "grad_norm": 0.471097856760025, "learning_rate": 4.948740381849879e-06, "loss": 0.4693, "step": 142 },
    { "epoch": 0.6733372572101236, "grad_norm": 0.46013182401657104, "learning_rate": 4.947422263290249e-06, "loss": 0.4683, "step": 143 },
    { "epoch": 0.6780459093584461, "grad_norm": 0.45335468649864197, "learning_rate": 4.946087591813581e-06, "loss": 0.4739, "step": 144 },
    { "epoch": 0.6827545615067687, "grad_norm": 0.4198642671108246, "learning_rate": 4.944736376446804e-06, "loss": 0.4516, "step": 145 },
    { "epoch": 0.6874632136550912, "grad_norm": 0.5052395462989807, "learning_rate": 4.943368626328741e-06, "loss": 0.465, "step": 146 },
    { "epoch": 0.6921718658034137, "grad_norm": 0.462447851896286, "learning_rate": 4.941984350710045e-06, "loss": 0.479, "step": 147 },
    { "epoch": 0.6968805179517363, "grad_norm": 0.4185675084590912, "learning_rate": 4.940583558953138e-06, "loss": 0.4458, "step": 148 },
    { "epoch": 0.7015891701000588, "grad_norm": 0.4506431519985199, "learning_rate": 4.939166260532145e-06, "loss": 0.4866, "step": 149 },
    { "epoch": 0.7062978222483814, "grad_norm": 0.41306138038635254, "learning_rate": 4.937732465032838e-06, "loss": 0.4834, "step": 150 },
    { "epoch": 0.711006474396704, "grad_norm": 0.42707082629203796, "learning_rate": 4.936282182152559e-06, "loss": 0.4582, "step": 151 },
    { "epoch": 0.7157151265450264, "grad_norm": 0.5058028101921082, "learning_rate": 4.934815421700166e-06, "loss": 0.4883, "step": 152 },
    { "epoch": 0.7204237786933491, "grad_norm": 0.476229727268219, "learning_rate": 4.933332193595956e-06, "loss": 0.4913, "step": 153 },
    { "epoch": 0.7251324308416716, "grad_norm": 0.40548497438430786, "learning_rate": 4.931832507871611e-06, "loss": 0.4497, "step": 154 },
    { "epoch": 0.7298410829899941, "grad_norm": 0.401818186044693, "learning_rate": 4.930316374670115e-06, "loss": 0.4444, "step": 155 },
    { "epoch": 0.7345497351383167, "grad_norm": 0.3804225027561188, "learning_rate": 4.9287838042457e-06, "loss": 0.4499, "step": 156 },
    { "epoch": 0.7392583872866392, "grad_norm": 0.5111300349235535, "learning_rate": 4.927234806963763e-06, "loss": 0.4635, "step": 157 },
    { "epoch": 0.7439670394349618, "grad_norm": 0.4021705389022827, "learning_rate": 4.925669393300808e-06, "loss": 0.4597, "step": 158 },
    { "epoch": 0.7486756915832843, "grad_norm": 0.39262616634368896, "learning_rate": 4.924087573844365e-06, "loss": 0.4607, "step": 159 },
    { "epoch": 0.7533843437316068, "grad_norm": 0.3751242160797119, "learning_rate": 4.922489359292928e-06, "loss": 0.4376, "step": 160 },
    { "epoch": 0.7580929958799294, "grad_norm": 0.880349338054657, "learning_rate": 4.920874760455874e-06, "loss": 0.4372, "step": 161 },
    { "epoch": 0.7628016480282519, "grad_norm": 0.4332122206687927, "learning_rate": 4.919243788253394e-06, "loss": 0.4577, "step": 162 },
    { "epoch": 0.7675103001765744, "grad_norm": 0.40380653738975525, "learning_rate": 4.917596453716421e-06, "loss": 0.4561, "step": 163 },
    { "epoch": 0.772218952324897, "grad_norm": 0.40455198287963867, "learning_rate": 4.915932767986552e-06, "loss": 0.4469, "step": 164 },
    { "epoch": 0.7769276044732195, "grad_norm": 0.3832710087299347, "learning_rate": 4.914252742315972e-06, "loss": 0.4463, "step": 165 },
    { "epoch": 0.7816362566215421, "grad_norm": 0.46339336037635803, "learning_rate": 4.9125563880673814e-06, "loss": 0.4591, "step": 166 },
    { "epoch": 0.7863449087698646, "grad_norm": 0.4066482186317444, "learning_rate": 4.9108437167139186e-06, "loss": 0.447, "step": 167 },
    { "epoch": 0.7910535609181871, "grad_norm": 0.38544756174087524, "learning_rate": 4.90911473983908e-06, "loss": 0.4556, "step": 168 },
    { "epoch": 0.7957622130665097, "grad_norm": 0.39471447467803955, "learning_rate": 4.90736946913664e-06, "loss": 0.443, "step": 169 },
    { "epoch": 0.8004708652148322, "grad_norm": 0.42424091696739197, "learning_rate": 4.905607916410581e-06, "loss": 0.4561, "step": 170 },
    { "epoch": 0.8051795173631549, "grad_norm": 0.4095331132411957, "learning_rate": 4.9038300935750026e-06, "loss": 0.4622, "step": 171 },
    { "epoch": 0.8098881695114774, "grad_norm": 0.41514596343040466, "learning_rate": 4.902036012654048e-06, "loss": 0.4547, "step": 172 },
    { "epoch": 0.8145968216597999, "grad_norm": 0.4074968099594116, "learning_rate": 4.900225685781819e-06, "loss": 0.4555, "step": 173 },
    { "epoch": 0.8193054738081225, "grad_norm": 0.40082669258117676, "learning_rate": 4.898399125202296e-06, "loss": 0.4473, "step": 174 },
    { "epoch": 0.824014125956445, "grad_norm": 0.49021676182746887, "learning_rate": 4.896556343269254e-06, "loss": 0.4404, "step": 175 },
    { "epoch": 0.8287227781047675, "grad_norm": 0.3969082236289978, "learning_rate": 4.894697352446182e-06, "loss": 0.4535, "step": 176 },
    { "epoch": 0.8334314302530901, "grad_norm": 0.39450210332870483, "learning_rate": 4.892822165306195e-06, "loss": 0.4313, "step": 177 },
    { "epoch": 0.8381400824014126, "grad_norm": 0.41130906343460083, "learning_rate": 4.8909307945319474e-06, "loss": 0.4725, "step": 178 },
    { "epoch": 0.8428487345497352, "grad_norm": 0.3974241614341736, "learning_rate": 4.8890232529155556e-06, "loss": 0.4665, "step": 179 },
    { "epoch": 0.8475573866980577, "grad_norm": 0.46764135360717773, "learning_rate": 4.887099553358502e-06, "loss": 0.4699, "step": 180 },
    { "epoch": 0.8522660388463802, "grad_norm": 0.41044411063194275, "learning_rate": 4.885159708871552e-06, "loss": 0.4685, "step": 181 },
    { "epoch": 0.8569746909947028, "grad_norm": 0.4245174825191498, "learning_rate": 4.883203732574668e-06, "loss": 0.4465, "step": 182 },
    { "epoch": 0.8616833431430253, "grad_norm": 0.41129034757614136, "learning_rate": 4.881231637696918e-06, "loss": 0.4824, "step": 183 },
    { "epoch": 0.8663919952913478, "grad_norm": 0.4054642617702484, "learning_rate": 4.879243437576383e-06, "loss": 0.4318, "step": 184 },
    { "epoch": 0.8711006474396704, "grad_norm": 0.38721156120300293, "learning_rate": 4.877239145660075e-06, "loss": 0.4555, "step": 185 },
    { "epoch": 0.8758092995879929, "grad_norm": 0.41428545117378235, "learning_rate": 4.8752187755038384e-06, "loss": 0.4505, "step": 186 },
    { "epoch": 0.8805179517363155, "grad_norm": 0.42160969972610474, "learning_rate": 4.873182340772263e-06, "loss": 0.4424, "step": 187 },
    { "epoch": 0.885226603884638, "grad_norm": 0.38617607951164246, "learning_rate": 4.871129855238589e-06, "loss": 0.4475, "step": 188 },
    { "epoch": 0.8899352560329605, "grad_norm": 0.398945152759552, "learning_rate": 4.869061332784614e-06, "loss": 0.4386, "step": 189 },
    { "epoch": 0.8946439081812831, "grad_norm": 0.3810630738735199, "learning_rate": 4.866976787400602e-06, "loss": 0.4471, "step": 190 },
    { "epoch": 0.8993525603296056, "grad_norm": 0.391017347574234, "learning_rate": 4.864876233185185e-06, "loss": 0.4541, "step": 191 },
    { "epoch": 0.9040612124779281, "grad_norm": 0.4350132644176483, "learning_rate": 4.862759684345269e-06, "loss": 0.4685, "step": 192 },
    { "epoch": 0.9087698646262508, "grad_norm": 0.3881378769874573, "learning_rate": 4.860627155195941e-06, "loss": 0.4369, "step": 193 },
    { "epoch": 0.9134785167745733, "grad_norm": 0.42739424109458923, "learning_rate": 4.858478660160364e-06, "loss": 0.4669, "step": 194 },
    { "epoch": 0.9181871689228959, "grad_norm": 0.40032345056533813, "learning_rate": 4.8563142137696906e-06, "loss": 0.4769, "step": 195 },
    { "epoch": 0.9228958210712184, "grad_norm": 0.4069063067436218, "learning_rate": 4.854133830662955e-06, "loss": 0.448, "step": 196 },
    { "epoch": 0.9276044732195409, "grad_norm": 0.38561198115348816, "learning_rate": 4.85193752558698e-06, "loss": 0.4421, "step": 197 },
    { "epoch": 0.9323131253678635, "grad_norm": 0.44492074847221375, "learning_rate": 4.849725313396274e-06, "loss": 0.4647, "step": 198 },
    { "epoch": 0.937021777516186, "grad_norm": 0.3882367014884949, "learning_rate": 4.847497209052933e-06, "loss": 0.4327, "step": 199 },
    { "epoch": 0.9417304296645085, "grad_norm": 0.38339805603027344, "learning_rate": 4.8452532276265366e-06, "loss": 0.4464, "step": 200 },
    { "epoch": 0.9464390818128311, "grad_norm": 0.3912663161754608, "learning_rate": 4.842993384294047e-06, "loss": 0.4656, "step": 201 },
    { "epoch": 0.9511477339611536, "grad_norm": 0.38164153695106506, "learning_rate": 4.84071769433971e-06, "loss": 0.4683, "step": 202 },
    { "epoch": 0.9558563861094762, "grad_norm": 0.40451687574386597, "learning_rate": 4.838426173154948e-06, "loss": 0.455, "step": 203 },
    { "epoch": 0.9605650382577987, "grad_norm": 0.37911689281463623, "learning_rate": 4.836118836238253e-06, "loss": 0.4493, "step": 204 },
    { "epoch": 0.9652736904061212, "grad_norm": 0.3586491644382477, "learning_rate": 4.833795699195089e-06, "loss": 0.4293, "step": 205 },
    { "epoch": 0.9699823425544438, "grad_norm": 0.3737817406654358, "learning_rate": 4.83145677773778e-06, "loss": 0.4433, "step": 206 },
    { "epoch": 0.9746909947027663, "grad_norm": 0.40282636880874634, "learning_rate": 4.829102087685408e-06, "loss": 0.4528, "step": 207 },
    { "epoch": 0.9793996468510888, "grad_norm": 0.41744890809059143, "learning_rate": 4.826731644963705e-06, "loss": 0.4591, "step": 208 },
    { "epoch": 0.9841082989994114, "grad_norm": 0.39800336956977844, "learning_rate": 4.824345465604944e-06, "loss": 0.4433, "step": 209 },
    { "epoch": 0.9888169511477339, "grad_norm": 0.41930362582206726, "learning_rate": 4.821943565747831e-06, "loss": 0.4362, "step": 210 },
    { "epoch": 0.9935256032960565, "grad_norm": 0.4097538888454437, "learning_rate": 4.8195259616373955e-06, "loss": 0.4528, "step": 211 },
    { "epoch": 0.998234255444379, "grad_norm": 0.3836863040924072, "learning_rate": 4.817092669624883e-06, "loss": 0.4621, "step": 212 },
    { "epoch": 1.0, "grad_norm": 0.3836863040924072, "learning_rate": 4.814643706167642e-06, "loss": 0.4892, "step": 213 },
    { "epoch": 1.0047086521483226, "grad_norm": 0.7609542012214661, "learning_rate": 4.8121790878290135e-06, "loss": 0.4259, "step": 214 },
    { "epoch": 1.009417304296645, "grad_norm": 0.3908381462097168, "learning_rate": 4.809698831278217e-06, "loss": 0.4192, "step": 215 },
    { "epoch": 1.0141259564449676, "grad_norm": 0.43996715545654297, "learning_rate": 4.807202953290243e-06, "loss": 0.4103, "step": 216 },
    { "epoch": 1.0188346085932902, "grad_norm": 0.41817402839660645, "learning_rate": 4.804691470745734e-06, "loss": 0.4432, "step": 217 },
    { "epoch": 1.0235432607416126, "grad_norm": 0.3807670772075653, "learning_rate": 4.8021644006308735e-06, "loss": 0.3983, "step": 218 },
    { "epoch": 1.0282519128899352, "grad_norm": 0.390556275844574, "learning_rate": 4.799621760037268e-06, "loss": 0.4002, "step": 219 },
    { "epoch": 1.0329605650382578, "grad_norm": 0.4105827212333679, "learning_rate": 4.7970635661618345e-06, "loss": 0.4409, "step": 220 },
    { "epoch": 1.0376692171865805, "grad_norm": 0.42910847067832947, "learning_rate": 4.7944898363066865e-06, "loss": 0.4316, "step": 221 },
    { "epoch": 1.0423778693349028, "grad_norm": 0.42775776982307434, "learning_rate": 4.79190058787901e-06, "loss": 0.4248, "step": 222 },
    { "epoch": 1.0470865214832255, "grad_norm": 0.5438376069068909, "learning_rate": 4.789295838390947e-06, "loss": 0.4301, "step": 223 },
    { "epoch": 1.051795173631548, "grad_norm": 0.403005450963974, "learning_rate": 4.786675605459488e-06, "loss": 0.4118, "step": 224 },
    { "epoch": 1.0565038257798705, "grad_norm": 0.3933987617492676, "learning_rate": 4.7840399068063355e-06, "loss": 0.4146, "step": 225 },
    { "epoch": 1.061212477928193, "grad_norm": 0.3724134862422943, "learning_rate": 4.781388760257799e-06, "loss": 0.4186, "step": 226 },
    { "epoch": 1.0659211300765157, "grad_norm": 0.3519379794597626, "learning_rate": 4.778722183744665e-06, "loss": 0.3863, "step": 227 },
    { "epoch": 1.070629782224838, "grad_norm": 0.4137888252735138, "learning_rate": 4.77604019530208e-06, "loss": 0.4398, "step": 228 },
    { "epoch": 1.0753384343731607, "grad_norm": 0.3873429596424103, "learning_rate": 4.773342813069428e-06, "loss": 0.4332, "step": 229 },
    { "epoch": 1.0800470865214833, "grad_norm": 0.38474398851394653, "learning_rate": 4.770630055290209e-06, "loss": 0.4318, "step": 230 },
    { "epoch": 1.0847557386698057, "grad_norm": 0.4013967514038086, "learning_rate": 4.767901940311911e-06, "loss": 0.4429, "step": 231 },
    { "epoch": 1.0894643908181283, "grad_norm": 0.3859746754169464, "learning_rate": 4.76515848658589e-06, "loss": 0.4095, "step": 232 },
    { "epoch": 1.094173042966451, "grad_norm": 0.36502712965011597, "learning_rate": 4.762399712667247e-06, "loss": 0.4093, "step": 233 },
    { "epoch": 1.0988816951147733, "grad_norm": 0.3925178349018097, "learning_rate": 4.7596256372146964e-06, "loss": 0.4176, "step": 234 },
    { "epoch": 1.103590347263096, "grad_norm": 0.3929302990436554, "learning_rate": 4.7568362789904445e-06, "loss": 0.4312, "step": 235 },
    { "epoch": 1.1082989994114185, "grad_norm": 0.3633573055267334, "learning_rate": 4.75403165686006e-06, "loss": 0.4449, "step": 236 },
    { "epoch": 1.113007651559741, "grad_norm": 0.4058593809604645, "learning_rate": 4.751211789792349e-06, "loss": 0.4242, "step": 237 },
    { "epoch": 1.1177163037080635, "grad_norm": 0.37989768385887146, "learning_rate": 4.748376696859226e-06, "loss": 0.4035, "step": 238 },
    { "epoch": 1.1224249558563861, "grad_norm": 0.46411606669425964, "learning_rate": 4.745526397235583e-06, "loss": 0.388, "step": 239 },
    { "epoch": 1.1271336080047087, "grad_norm": 0.3705718219280243, "learning_rate": 4.7426609101991605e-06, "loss": 0.3974, "step": 240 },
    { "epoch": 1.1318422601530311, "grad_norm": 0.3925943374633789, "learning_rate": 4.7397802551304185e-06, "loss": 0.4229, "step": 241 },
    { "epoch": 1.1365509123013537, "grad_norm": 0.37178972363471985, "learning_rate": 4.736884451512405e-06, "loss": 0.4055, "step": 242 },
    { "epoch": 1.1412595644496764, "grad_norm": 0.39106276631355286, "learning_rate": 4.733973518930624e-06, "loss": 0.4418, "step": 243 },
    { "epoch": 1.1459682165979987, "grad_norm": 0.3852050006389618, "learning_rate": 4.7310474770729e-06, "loss": 0.4066, "step": 244 },
    { "epoch": 1.1506768687463214, "grad_norm": 0.37759003043174744, "learning_rate": 4.728106345729253e-06, "loss": 0.3907, "step": 245 },
    { "epoch": 1.155385520894644, "grad_norm": 0.43599414825439453, "learning_rate": 4.7251501447917535e-06, "loss": 0.4632, "step": 246 },
    { "epoch": 1.1600941730429664, "grad_norm": 0.38698461651802063, "learning_rate": 4.722178894254399e-06, "loss": 0.4031, "step": 247 },
    { "epoch": 1.164802825191289, "grad_norm": 0.4270411729812622, "learning_rate": 4.71919261421297e-06, "loss": 0.4251, "step": 248 },
    { "epoch": 1.1695114773396116, "grad_norm": 0.4065026640892029, "learning_rate": 4.716191324864899e-06, "loss": 0.4295, "step": 249 },
    { "epoch": 1.1742201294879342, "grad_norm": 0.38610973954200745, "learning_rate": 4.7131750465091315e-06, "loss": 0.3991, "step": 250 },
    { "epoch": 1.1789287816362566, "grad_norm": 0.41599562764167786, "learning_rate": 4.710143799545994e-06, "loss": 0.4278, "step": 251 },
    { "epoch": 1.1836374337845792, "grad_norm": 0.429103821516037, "learning_rate": 4.707097604477045e-06, "loss": 0.406, "step": 252 },
    { "epoch": 1.1883460859329018, "grad_norm": 0.4257577657699585, "learning_rate": 4.704036481904949e-06, "loss": 0.4471, "step": 253 },
    { "epoch": 1.1930547380812242, "grad_norm": 0.38792288303375244, "learning_rate": 4.700960452533329e-06, "loss": 0.4114, "step": 254 },
    { "epoch": 1.1977633902295468, "grad_norm": 0.38506487011909485, "learning_rate": 4.697869537166626e-06, "loss": 0.4106, "step": 255 },
    { "epoch": 1.2024720423778694, "grad_norm": 0.42475154995918274, "learning_rate": 4.6947637567099675e-06, "loss": 0.4208, "step": 256 },
    { "epoch": 1.2071806945261918, "grad_norm": 0.3511320650577545, "learning_rate": 4.691643132169014e-06, "loss": 0.4231, "step": 257 },
    { "epoch": 1.2118893466745144, "grad_norm": 0.38943809270858765, "learning_rate": 4.6885076846498255e-06, "loss": 0.4013, "step": 258 },
    { "epoch": 1.216597998822837, "grad_norm": 0.3698880672454834, "learning_rate": 4.685357435358715e-06, "loss": 0.4055, "step": 259 },
    { "epoch": 1.2213066509711594, "grad_norm": 0.3700575828552246, "learning_rate": 4.682192405602106e-06, "loss": 0.4142, "step": 260 },
    { "epoch": 1.226015303119482, "grad_norm": 0.40088504552841187, "learning_rate": 4.6790126167863875e-06, "loss": 0.4258, "step": 261 },
    { "epoch": 1.2307239552678046, "grad_norm": 0.4097707271575928, "learning_rate": 4.675818090417772e-06, "loss": 0.4199, "step": 262 },
    { "epoch": 1.235432607416127, "grad_norm": 0.4138272702693939, "learning_rate": 4.672608848102146e-06, "loss": 0.4601, "step": 263 },
    { "epoch": 1.2401412595644496, "grad_norm": 0.40342196822166443, "learning_rate": 4.6693849115449265e-06, "loss": 0.3962, "step": 264 },
    { "epoch": 1.2448499117127723, "grad_norm": 0.39221933484077454, "learning_rate": 4.666146302550916e-06, "loss": 0.4273, "step": 265 },
    { "epoch": 1.2495585638610947, "grad_norm": 0.42961645126342773, "learning_rate": 4.66289304302415e-06, "loss": 0.3995, "step": 266 },
    { "epoch": 1.2542672160094173, "grad_norm": 0.3756173849105835, "learning_rate": 4.659625154967751e-06, "loss": 0.4143, "step": 267 },
    { "epoch": 1.2589758681577399, "grad_norm": 0.3766971230506897, "learning_rate": 4.656342660483783e-06, "loss": 0.4316, "step": 268 },
    { "epoch": 1.2636845203060623, "grad_norm": 0.3824644982814789, "learning_rate": 4.6530455817730965e-06, "loss": 0.4078, "step": 269 },
    { "epoch": 1.2683931724543849, "grad_norm": 0.4655967056751251, "learning_rate": 4.649733941135183e-06, "loss": 0.4123, "step": 270 },
    { "epoch": 1.2731018246027075, "grad_norm": 0.42232146859169006, "learning_rate": 4.646407760968023e-06, "loss": 0.4197, "step": 271 },
    { "epoch": 1.27781047675103, "grad_norm": 0.39129775762557983, "learning_rate": 4.6430670637679295e-06, "loss": 0.4297, "step": 272 },
    { "epoch": 1.2825191288993525, "grad_norm": 0.41317105293273926, "learning_rate": 4.639711872129405e-06, "loss": 0.4479, "step": 273 },
    { "epoch": 1.287227781047675, "grad_norm": 0.38349881768226624, "learning_rate": 4.636342208744981e-06, "loss": 0.4113, "step": 274 },
    { "epoch": 1.2919364331959977, "grad_norm": 0.43842265009880066, "learning_rate": 4.632958096405071e-06, "loss": 0.4299, "step": 275 },
    { "epoch": 1.2966450853443203, "grad_norm": 0.37726807594299316, "learning_rate": 4.629559557997805e-06, "loss": 0.4099, "step": 276 },
    { "epoch": 1.3013537374926427, "grad_norm": 0.3869900703430176, "learning_rate": 4.62614661650889e-06, "loss": 0.4244, "step": 277 },
    { "epoch": 1.3060623896409653, "grad_norm": 0.37169286608695984, "learning_rate": 4.6227192950214435e-06, "loss": 0.4005, "step": 278 },
    { "epoch": 1.310771041789288, "grad_norm": 0.3810769021511078, "learning_rate": 4.619277616715843e-06, "loss": 0.4334, "step": 279 },
    { "epoch": 1.3154796939376103, "grad_norm": 0.3881538510322571, "learning_rate": 4.615821604869564e-06, "loss": 0.4163, "step": 280 },
    { "epoch": 1.320188346085933, "grad_norm": 0.37338587641716003, "learning_rate": 4.612351282857027e-06, "loss": 0.4233, "step": 281 },
    { "epoch": 1.3248969982342556, "grad_norm": 0.3771686851978302, "learning_rate": 4.6088666741494395e-06, "loss": 0.4024, "step": 282 },
    { "epoch": 1.329605650382578, "grad_norm": 0.3833746910095215, "learning_rate": 4.605367802314632e-06, "loss": 0.3932, "step": 283 },
    { "epoch": 1.3343143025309006, "grad_norm": 0.37729066610336304, "learning_rate": 4.601854691016907e-06, "loss": 0.4022, "step": 284 },
    { "epoch": 1.3390229546792232, "grad_norm": 0.3654945492744446, "learning_rate": 4.598327364016871e-06, "loss": 0.4089, "step": 285 },
    { "epoch": 1.3437316068275456, "grad_norm": 0.39260923862457275, "learning_rate": 4.5947858451712775e-06, "loss": 0.4286, "step": 286 },
    { "epoch": 1.3484402589758682, "grad_norm": 0.4227854609489441, "learning_rate": 4.591230158432867e-06, "loss": 0.4287, "step": 287 },
    { "epoch": 1.3531489111241908, "grad_norm": 0.4016524851322174, "learning_rate": 4.587660327850203e-06, "loss": 0.4132, "step": 288 },
    { "epoch": 1.3578575632725132, "grad_norm": 0.3761261999607086, "learning_rate": 4.584076377567506e-06, "loss": 0.426, "step": 289 },
    { "epoch": 1.3625662154208358, "grad_norm": 0.3990737795829773, "learning_rate": 4.580478331824499e-06, "loss": 0.4224, "step": 290 },
    { "epoch": 1.3672748675691584, "grad_norm": 0.3864597976207733, "learning_rate": 4.576866214956235e-06, "loss": 0.4021, "step": 291 },
    { "epoch": 1.3719835197174808, "grad_norm": 0.39642560482025146, "learning_rate": 4.5732400513929354e-06, "loss": 0.4373, "step": 292 },
    { "epoch": 1.3766921718658034, "grad_norm": 0.3693861663341522, "learning_rate": 4.569599865659829e-06, "loss": 0.4239, "step": 293 },
    { "epoch": 1.381400824014126, "grad_norm": 0.374732106924057, "learning_rate": 4.565945682376978e-06, "loss": 0.4138, "step": 294 },
    { "epoch": 1.3861094761624484, "grad_norm": 0.3904346227645874, "learning_rate": 4.562277526259117e-06, "loss": 0.4371, "step": 295 },
    { "epoch": 1.390818128310771, "grad_norm": 0.42636239528656006, "learning_rate": 4.558595422115486e-06, "loss": 0.4326, "step": 296 },
    { "epoch": 1.3955267804590936, "grad_norm": 0.4012644290924072, "learning_rate": 4.55489939484966e-06, "loss": 0.417, "step": 297 },
    { "epoch": 1.400235432607416, "grad_norm": 0.3714677393436432, "learning_rate": 4.551189469459382e-06, "loss": 0.4112, "step": 298 },
    { "epoch": 1.4049440847557386, "grad_norm": 0.36800074577331543, "learning_rate": 4.547465671036394e-06, "loss": 0.4136, "step": 299 },
    { "epoch": 1.4096527369040612, "grad_norm": 0.42034563422203064, "learning_rate": 4.543728024766265e-06, "loss": 0.4389, "step": 300 },
    { "epoch": 1.4143613890523836, "grad_norm": 0.37657004594802856, "learning_rate": 4.539976555928227e-06, "loss": 0.4241, "step": 301 },
    { "epoch": 1.4190700412007062, "grad_norm": 0.3639003336429596, "learning_rate": 4.536211289894995e-06, "loss": 0.4055, "step": 302 },
    { "epoch": 1.4237786933490288, "grad_norm": 0.3953859210014343, "learning_rate": 4.532432252132603e-06, "loss": 0.4317, "step": 303 },
    { "epoch": 1.4284873454973515, "grad_norm": 0.3482641279697418, "learning_rate": 4.5286394682002265e-06, "loss": 0.4023, "step": 304 },
    { "epoch": 1.4331959976456738, "grad_norm": 0.41340556740760803, "learning_rate": 4.524832963750015e-06, "loss": 0.4301, "step": 305 },
    { "epoch": 1.4379046497939965, "grad_norm": 0.39513641595840454, "learning_rate": 4.521012764526913e-06, "loss": 0.4083, "step": 306 },
    { "epoch": 1.442613301942319, "grad_norm": 0.3495503067970276, "learning_rate": 4.517178896368489e-06, "loss": 0.3983, "step": 307 },
    { "epoch": 1.4473219540906417, "grad_norm": 0.7292077541351318, "learning_rate": 4.513331385204762e-06, "loss": 0.4199, "step": 308 },
    { "epoch": 1.452030606238964, "grad_norm": 0.403432160615921, "learning_rate": 4.50947025705802e-06, "loss": 0.4303, "step": 309 },
    { "epoch": 1.4567392583872867, "grad_norm": 0.3867107331752777, "learning_rate": 4.505595538042652e-06, "loss": 0.4178, "step": 310 },
    { "epoch": 1.4614479105356093, "grad_norm": 0.39861124753952026, "learning_rate": 4.501707254364967e-06, "loss": 0.4134, "step": 311 },
    { "epoch": 1.4661565626839317, "grad_norm": 0.3962036669254303, "learning_rate": 4.497805432323015e-06, "loss": 0.4302, "step": 312 },
    { "epoch": 1.4708652148322543, "grad_norm": 0.3967229127883911, "learning_rate": 4.493890098306414e-06, "loss": 0.4011, "step": 313 },
    { "epoch": 1.475573866980577, "grad_norm": 0.37047067284584045, "learning_rate": 4.4899612787961675e-06, "loss": 0.4066, "step": 314 },
    { "epoch": 1.4802825191288993, "grad_norm": 0.3553713858127594, "learning_rate": 4.4860190003644895e-06, "loss": 0.4214, "step": 315 },
    { "epoch": 1.484991171277222, "grad_norm": 0.3747401833534241, "learning_rate": 4.4820632896746185e-06, "loss": 0.4022, "step": 316 },
    { "epoch": 1.4896998234255445, "grad_norm": 0.3619938790798187, "learning_rate": 4.478094173480642e-06, "loss": 0.3946, "step": 317 },
    { "epoch": 1.494408475573867, "grad_norm": 0.3725791871547699, "learning_rate": 4.474111678627318e-06, "loss": 0.4031, "step": 318 },
    { "epoch": 1.4991171277221895, "grad_norm": 0.3898618817329407, "learning_rate": 4.470115832049885e-06, "loss": 0.4181, "step": 319 },
    { "epoch": 1.5038257798705121, "grad_norm": 0.3704550862312317, "learning_rate": 4.466106660773884e-06, "loss": 0.4043, "step": 320 },
    { "epoch": 1.5085344320188345, "grad_norm": 0.4264324903488159, "learning_rate": 4.462084191914984e-06, "loss": 0.401, "step": 321 },
    { "epoch": 1.5132430841671571, "grad_norm": 0.39826300740242004, "learning_rate": 4.458048452678781e-06, "loss": 0.3926, "step": 322 },
    { "epoch": 1.5179517363154797, "grad_norm": 0.3568350076675415, "learning_rate": 4.453999470360633e-06, "loss": 0.3904, "step": 323 },
    { "epoch": 1.5226603884638021, "grad_norm": 0.37382814288139343, "learning_rate": 4.449937272345461e-06, "loss": 0.3873, "step": 324 },
    { "epoch": 1.5273690406121248, "grad_norm": 0.41268882155418396, "learning_rate": 4.445861886107567e-06, "loss": 0.4063, "step": 325 },
    { "epoch": 1.5320776927604474, "grad_norm": 0.3825395107269287, "learning_rate": 4.441773339210459e-06, "loss": 0.4148, "step": 326 },
    { "epoch": 1.5367863449087698, "grad_norm": 0.38161659240722656, "learning_rate": 4.437671659306647e-06, "loss": 0.4228, "step": 327 },
    { "epoch": 1.5414949970570924, "grad_norm": 0.391406387090683, "learning_rate": 4.43355687413747e-06, "loss": 0.4283, "step": 328 },
    { "epoch": 1.546203649205415, "grad_norm": 0.9112624526023865, "learning_rate": 4.429429011532902e-06, "loss": 0.3876, "step": 329 },
    { "epoch": 1.5509123013537374, "grad_norm": 0.3513050377368927, "learning_rate": 4.425288099411365e-06, "loss": 0.4025, "step": 330 },
    { "epoch": 1.5556209535020602, "grad_norm": 0.40149223804473877, "learning_rate": 4.421134165779538e-06, "loss": 0.4183, "step": 331 },
    { "epoch": 1.5603296056503826, "grad_norm": 0.3666805624961853, "learning_rate": 4.416967238732174e-06, "loss": 0.4268, "step": 332 },
    { "epoch": 1.565038257798705, "grad_norm": 0.4094839096069336, "learning_rate": 4.412787346451903e-06, "loss": 0.4255, "step": 333 },
    { "epoch": 1.5697469099470278, "grad_norm": 0.38265460729599, "learning_rate": 4.408594517209045e-06, "loss": 0.4095, "step": 334 },
    { "epoch": 1.5744555620953502, "grad_norm": 0.36584508419036865, "learning_rate": 4.4043887793614186e-06, "loss": 0.4085, "step": 335 },
    { "epoch": 1.5791642142436726, "grad_norm": 0.3524089455604553, "learning_rate": 4.400170161354146e-06, "loss": 0.3903, "step": 336 },
    { "epoch": 1.5838728663919954, "grad_norm": 0.3962515592575073, "learning_rate": 4.395938691719466e-06, "loss": 0.4203, "step": 337 },
    { "epoch": 1.5885815185403178, "grad_norm": 0.38059180974960327, "learning_rate": 4.391694399076537e-06, "loss": 0.4084, "step": 338 },
    { "epoch": 1.5932901706886404, "grad_norm": 0.37096184492111206, "learning_rate": 4.387437312131246e-06, "loss": 0.4077, "step": 339 },
    { "epoch": 1.597998822836963, "grad_norm": 0.34766098856925964, "learning_rate": 4.383167459676009e-06, "loss": 0.4085, "step": 340 },
    { "epoch": 1.6027074749852854, "grad_norm": 0.3656401038169861, "learning_rate": 4.378884870589585e-06, "loss": 0.4098, "step": 341 },
    { "epoch": 1.607416127133608, "grad_norm": 0.43042734265327454, "learning_rate": 4.374589573836875e-06, "loss": 0.4143, "step": 342 },
    { "epoch": 1.6121247792819307, "grad_norm": 0.42422834038734436, "learning_rate": 4.370281598468725e-06, "loss": 0.3953, "step": 343 },
    { "epoch": 1.616833431430253, "grad_norm": 0.3533729910850525, "learning_rate": 4.365960973621735e-06, "loss": 0.4068, "step": 344 },
    { "epoch": 1.6215420835785757, "grad_norm": 0.36275553703308105, "learning_rate": 4.361627728518054e-06, "loss": 0.4014, "step": 345 },
    { "epoch": 1.6262507357268983, "grad_norm": 0.4120238721370697, "learning_rate": 4.357281892465192e-06, "loss": 0.4401, "step": 346 },
    { "epoch": 1.6309593878752207, "grad_norm": 0.3676035404205322, "learning_rate": 4.352923494855813e-06, "loss": 0.4034, "step": 347 },
    { "epoch": 1.6356680400235433, "grad_norm": 0.3960263729095459, "learning_rate": 4.348552565167542e-06, "loss": 0.3941, "step": 348 },
    { "epoch": 1.6403766921718659, "grad_norm": 0.3616948425769806, "learning_rate": 4.344169132962763e-06, "loss": 0.399, "step": 349 },
    { "epoch": 1.6450853443201883, "grad_norm": 0.39258840680122375, "learning_rate": 4.33977322788842e-06, "loss": 0.4038, "step": 350 },
    { "epoch": 1.6497939964685109, "grad_norm": 0.4016514718532562, "learning_rate": 4.335364879675814e-06, "loss": 0.4164, "step": 351 },
    { "epoch": 1.6545026486168335, "grad_norm": 0.38080155849456787, "learning_rate": 4.330944118140406e-06, "loss": 0.3953, "step": 352 },
    { "epoch": 1.6592113007651559, "grad_norm": 0.3435121774673462, "learning_rate": 4.326510973181615e-06, "loss": 0.391, "step": 353 },
    { "epoch": 1.6639199529134785, "grad_norm": 0.3891783356666565, "learning_rate": 4.32206547478261e-06, "loss": 0.4129, "step": 354 },
    { "epoch": 1.668628605061801, "grad_norm": 0.39722615480422974, "learning_rate": 4.317607653010114e-06, "loss": 0.4223, "step": 355 },
    { "epoch": 1.6733372572101235, "grad_norm": 0.3996773958206177, "learning_rate": 4.313137538014199e-06, "loss": 0.4141, "step": 356 },
    { "epoch": 1.678045909358446, "grad_norm": 0.39691588282585144, "learning_rate": 4.308655160028078e-06, "loss": 0.3786, "step": 357 },
    { "epoch": 1.6827545615067687, "grad_norm": 0.3999384343624115, "learning_rate": 4.304160549367906e-06, "loss": 0.4311, "step": 358 },
    { "epoch": 1.687463213655091, "grad_norm": 0.4473234713077545, "learning_rate": 4.299653736432572e-06, "loss": 0.4319, "step": 359 },
    { "epoch": 1.6921718658034137, "grad_norm": 0.38774654269218445, "learning_rate": 4.295134751703493e-06, "loss": 0.4166, "step": 360 },
    { "epoch": 1.6968805179517363, "grad_norm": 0.41464555263519287, "learning_rate": 4.290603625744411e-06, "loss": 0.4141, "step": 361 },
    { "epoch": 1.7015891701000587, "grad_norm": 0.3974348306655884, "learning_rate": 4.286060389201181e-06, "loss": 0.4265, "step": 362 },
    { "epoch": 1.7062978222483816, "grad_norm": 0.3926202356815338, "learning_rate": 4.28150507280157e-06, "loss": 0.4078, "step": 363 },
    { "epoch": 1.711006474396704, "grad_norm": 0.37342771887779236, "learning_rate": 4.276937707355044e-06, "loss": 0.3894, "step": 364 },
    { "epoch": 1.7157151265450263, "grad_norm": 0.37231311202049255, "learning_rate": 4.272358323752563e-06, "loss": 0.414, "step": 365 },
    { "epoch": 1.7204237786933492, "grad_norm": 0.3970523774623871, "learning_rate": 4.267766952966369e-06, "loss": 0.4008, "step": 366 },
    { "epoch": 1.7251324308416716, "grad_norm": 0.3824027180671692, "learning_rate": 4.26316362604978e-06, "loss": 0.4174, "step": 367 },
    { "epoch": 1.729841082989994, "grad_norm": 0.38245680928230286, "learning_rate": 4.258548374136976e-06, "loss": 0.431, "step": 368 },
    { "epoch": 1.7345497351383168, "grad_norm": 0.38463136553764343, "learning_rate": 4.253921228442793e-06, "loss": 0.4076, "step": 369 },
    { "epoch": 1.7392583872866392, "grad_norm": 0.3690101206302643, "learning_rate": 4.249282220262507e-06, "loss": 0.3978, "step": 370 },
    { "epoch": 1.7439670394349618, "grad_norm": 0.3739013969898224, "learning_rate": 4.244631380971629e-06, "loss": 0.4205, "step": 371 },
    { "epoch": 1.7486756915832844, "grad_norm": 0.3831653594970703, "learning_rate": 4.239968742025684e-06, "loss": 0.3965,
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.7533843437316068, |
|
"grad_norm": 0.3808199465274811, |
|
"learning_rate": 4.235294334960005e-06, |
|
"loss": 0.408, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.7580929958799294, |
|
"grad_norm": 0.40046992897987366, |
|
"learning_rate": 4.230608191389518e-06, |
|
"loss": 0.407, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.762801648028252, |
|
"grad_norm": 0.37953296303749084, |
|
"learning_rate": 4.225910343008527e-06, |
|
"loss": 0.4187, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.7675103001765744, |
|
"grad_norm": 0.4310925006866455, |
|
"learning_rate": 4.2212008215905e-06, |
|
"loss": 0.4241, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.772218952324897, |
|
"grad_norm": 0.383286714553833, |
|
"learning_rate": 4.216479658987856e-06, |
|
"loss": 0.3998, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.7769276044732196, |
|
"grad_norm": 0.41131308674812317, |
|
"learning_rate": 4.211746887131747e-06, |
|
"loss": 0.3963, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.781636256621542, |
|
"grad_norm": 0.3883269727230072, |
|
"learning_rate": 4.207002538031843e-06, |
|
"loss": 0.4004, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.7863449087698646, |
|
"grad_norm": 0.39173465967178345, |
|
"learning_rate": 4.202246643776116e-06, |
|
"loss": 0.3966, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.7910535609181872, |
|
"grad_norm": 0.402128130197525, |
|
"learning_rate": 4.1974792365306235e-06, |
|
"loss": 0.3988, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.7957622130665096, |
|
"grad_norm": 0.3661210238933563, |
|
"learning_rate": 4.192700348539288e-06, |
|
"loss": 0.3783, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.8004708652148322, |
|
"grad_norm": 0.3758254051208496, |
|
"learning_rate": 4.187910012123683e-06, |
|
"loss": 0.4074, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.8051795173631549, |
|
"grad_norm": 0.37446528673171997, |
|
"learning_rate": 4.183108259682811e-06, |
|
"loss": 0.4084, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.8098881695114772, |
|
"grad_norm": 0.4256710410118103, |
|
"learning_rate": 4.178295123692886e-06, |
|
"loss": 0.4184, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.8145968216597999, |
|
"grad_norm": 0.3782687783241272, |
|
"learning_rate": 4.1734706367071155e-06, |
|
"loss": 0.4129, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.8193054738081225, |
|
"grad_norm": 0.43555963039398193, |
|
"learning_rate": 4.168634831355474e-06, |
|
"loss": 0.4243, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.8240141259564449, |
|
"grad_norm": 0.3523005247116089, |
|
"learning_rate": 4.163787740344492e-06, |
|
"loss": 0.3873, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.8287227781047675, |
|
"grad_norm": 0.36786767840385437, |
|
"learning_rate": 4.1589293964570255e-06, |
|
"loss": 0.3882, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.83343143025309, |
|
"grad_norm": 0.3621681332588196, |
|
"learning_rate": 4.154059832552041e-06, |
|
"loss": 0.3717, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.8381400824014125, |
|
"grad_norm": 0.349269837141037, |
|
"learning_rate": 4.149179081564388e-06, |
|
"loss": 0.3681, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.8428487345497353, |
|
"grad_norm": 0.38650140166282654, |
|
"learning_rate": 4.144287176504582e-06, |
|
"loss": 0.4063, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.8475573866980577, |
|
"grad_norm": 0.4057513475418091, |
|
"learning_rate": 4.139384150458575e-06, |
|
"loss": 0.4189, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.85226603884638, |
|
"grad_norm": 0.39712995290756226, |
|
"learning_rate": 4.134470036587536e-06, |
|
"loss": 0.4077, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.856974690994703, |
|
"grad_norm": 0.38149163126945496, |
|
"learning_rate": 4.129544868127626e-06, |
|
"loss": 0.4199, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.8616833431430253, |
|
"grad_norm": 0.41102463006973267, |
|
"learning_rate": 4.124608678389772e-06, |
|
"loss": 0.3964, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.8663919952913477, |
|
"grad_norm": 0.3947491943836212, |
|
"learning_rate": 4.119661500759442e-06, |
|
"loss": 0.418, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.8711006474396705, |
|
"grad_norm": 0.3679056465625763, |
|
"learning_rate": 4.114703368696421e-06, |
|
"loss": 0.3903, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.875809299587993, |
|
"grad_norm": 0.37078574299812317, |
|
"learning_rate": 4.109734315734584e-06, |
|
"loss": 0.4044, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.8805179517363155, |
|
"grad_norm": 0.38189971446990967, |
|
"learning_rate": 4.104754375481665e-06, |
|
"loss": 0.4182, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.8852266038846381, |
|
"grad_norm": 0.37885355949401855, |
|
"learning_rate": 4.0997635816190354e-06, |
|
"loss": 0.4146, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.8899352560329605, |
|
"grad_norm": 0.38311830163002014, |
|
"learning_rate": 4.094761967901474e-06, |
|
"loss": 0.4101, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.8946439081812831, |
|
"grad_norm": 0.3592222332954407, |
|
"learning_rate": 4.0897495681569385e-06, |
|
"loss": 0.4124, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.8993525603296058, |
|
"grad_norm": 0.449899822473526, |
|
"learning_rate": 4.084726416286338e-06, |
|
"loss": 0.4011, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.9040612124779281, |
|
"grad_norm": 0.3764320909976959, |
|
"learning_rate": 4.079692546263299e-06, |
|
"loss": 0.4068, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.9087698646262508, |
|
"grad_norm": 0.39345216751098633, |
|
"learning_rate": 4.074647992133946e-06, |
|
"loss": 0.3942, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.9134785167745734, |
|
"grad_norm": 0.4699568450450897, |
|
"learning_rate": 4.069592788016657e-06, |
|
"loss": 0.4245, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.9181871689228958, |
|
"grad_norm": 0.3634001910686493, |
|
"learning_rate": 4.064526968101844e-06, |
|
"loss": 0.4236, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.9228958210712184, |
|
"grad_norm": 0.3803250193595886, |
|
"learning_rate": 4.059450566651719e-06, |
|
"loss": 0.4097, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.927604473219541, |
|
"grad_norm": 0.39286285638809204, |
|
"learning_rate": 4.054363618000058e-06, |
|
"loss": 0.4107, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.9323131253678634, |
|
"grad_norm": 0.40081989765167236, |
|
"learning_rate": 4.049266156551975e-06, |
|
"loss": 0.4439, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.937021777516186, |
|
"grad_norm": 0.3855789601802826, |
|
"learning_rate": 4.044158216783684e-06, |
|
"loss": 0.4104, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.9417304296645086, |
|
"grad_norm": 0.33973613381385803, |
|
"learning_rate": 4.03903983324227e-06, |
|
"loss": 0.411, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.946439081812831, |
|
"grad_norm": 0.3574191927909851, |
|
"learning_rate": 4.0339110405454535e-06, |
|
"loss": 0.4028, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.9511477339611536, |
|
"grad_norm": 0.3561255633831024, |
|
"learning_rate": 4.028771873381354e-06, |
|
"loss": 0.4045, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.9558563861094762, |
|
"grad_norm": 0.3795716464519501, |
|
"learning_rate": 4.02362236650826e-06, |
|
"loss": 0.4019, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.9605650382577986, |
|
"grad_norm": 0.40633508563041687, |
|
"learning_rate": 4.018462554754393e-06, |
|
"loss": 0.3952, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.9652736904061212, |
|
"grad_norm": 0.35927334427833557, |
|
"learning_rate": 4.013292473017666e-06, |
|
"loss": 0.4009, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.9699823425544438, |
|
"grad_norm": 0.39236533641815186, |
|
"learning_rate": 4.008112156265455e-06, |
|
"loss": 0.4515, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.9746909947027662, |
|
"grad_norm": 0.41305258870124817, |
|
"learning_rate": 4.002921639534362e-06, |
|
"loss": 0.4234, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.9793996468510888, |
|
"grad_norm": 0.36336320638656616, |
|
"learning_rate": 3.997720957929971e-06, |
|
"loss": 0.3876, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.9841082989994114, |
|
"grad_norm": 0.5077244639396667, |
|
"learning_rate": 3.992510146626618e-06, |
|
"loss": 0.4333, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.9888169511477338, |
|
"grad_norm": 0.3557424247264862, |
|
"learning_rate": 3.987289240867148e-06, |
|
"loss": 0.3786, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.9935256032960567, |
|
"grad_norm": 0.37336745858192444, |
|
"learning_rate": 3.9820582759626825e-06, |
|
"loss": 0.3905, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.998234255444379, |
|
"grad_norm": 0.3858487010002136, |
|
"learning_rate": 3.976817287292374e-06, |
|
"loss": 0.4188, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.3858487010002136, |
|
"learning_rate": 3.971566310303171e-06, |
|
"loss": 0.395, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.0047086521483224, |
|
"grad_norm": 0.7181822061538696, |
|
"learning_rate": 3.966305380509576e-06, |
|
"loss": 0.373, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.0094173042966452, |
|
"grad_norm": 0.367645800113678, |
|
"learning_rate": 3.96103453349341e-06, |
|
"loss": 0.3708, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.0141259564449676, |
|
"grad_norm": 0.4022150933742523, |
|
"learning_rate": 3.955753804903562e-06, |
|
"loss": 0.3693, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.01883460859329, |
|
"grad_norm": 0.3676547408103943, |
|
"learning_rate": 3.950463230455762e-06, |
|
"loss": 0.3889, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.023543260741613, |
|
"grad_norm": 0.41727522015571594, |
|
"learning_rate": 3.945162845932323e-06, |
|
"loss": 0.3666, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.0282519128899352, |
|
"grad_norm": 0.396017849445343, |
|
"learning_rate": 3.939852687181916e-06, |
|
"loss": 0.3768, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.0329605650382576, |
|
"grad_norm": 0.3791135549545288, |
|
"learning_rate": 3.9345327901193104e-06, |
|
"loss": 0.3507, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.0376692171865805, |
|
"grad_norm": 0.3801550269126892, |
|
"learning_rate": 3.929203190725147e-06, |
|
"loss": 0.3858, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.042377869334903, |
|
"grad_norm": 0.3655739724636078, |
|
"learning_rate": 3.923863925045685e-06, |
|
"loss": 0.3745, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.0470865214832252, |
|
"grad_norm": 0.37982434034347534, |
|
"learning_rate": 3.918515029192559e-06, |
|
"loss": 0.358, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.051795173631548, |
|
"grad_norm": 0.36431175470352173, |
|
"learning_rate": 3.9131565393425385e-06, |
|
"loss": 0.3559, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.0565038257798705, |
|
"grad_norm": 0.3713436722755432, |
|
"learning_rate": 3.907788491737281e-06, |
|
"loss": 0.3753, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.061212477928193, |
|
"grad_norm": 0.3815770447254181, |
|
"learning_rate": 3.902410922683084e-06, |
|
"loss": 0.3456, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.0659211300765157, |
|
"grad_norm": 0.34248778223991394, |
|
"learning_rate": 3.897023868550649e-06, |
|
"loss": 0.3591, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.070629782224838, |
|
"grad_norm": 0.37305399775505066, |
|
"learning_rate": 3.89162736577482e-06, |
|
"loss": 0.3974, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.075338434373161, |
|
"grad_norm": 0.41609179973602295, |
|
"learning_rate": 3.886221450854355e-06, |
|
"loss": 0.3372, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.0800470865214833, |
|
"grad_norm": 0.3434038460254669, |
|
"learning_rate": 3.880806160351663e-06, |
|
"loss": 0.3597, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.0847557386698057, |
|
"grad_norm": 0.38777461647987366, |
|
"learning_rate": 3.875381530892569e-06, |
|
"loss": 0.3713, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.0894643908181285, |
|
"grad_norm": 0.48137035965919495, |
|
"learning_rate": 3.869947599166059e-06, |
|
"loss": 0.3803, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.094173042966451, |
|
"grad_norm": 0.4451170563697815, |
|
"learning_rate": 3.864504401924031e-06, |
|
"loss": 0.3766, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.0988816951147733, |
|
"grad_norm": 0.38987934589385986, |
|
"learning_rate": 3.859051975981056e-06, |
|
"loss": 0.3683, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.103590347263096, |
|
"grad_norm": 0.3817046582698822, |
|
"learning_rate": 3.853590358214119e-06, |
|
"loss": 0.3545, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.1082989994114185, |
|
"grad_norm": 0.3981139361858368, |
|
"learning_rate": 3.848119585562371e-06, |
|
"loss": 0.3532, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.113007651559741, |
|
"grad_norm": 0.3644142150878906, |
|
"learning_rate": 3.842639695026885e-06, |
|
"loss": 0.3739, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.1177163037080637, |
|
"grad_norm": 0.37675121426582336, |
|
"learning_rate": 3.8371507236704e-06, |
|
"loss": 0.3777, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.122424955856386, |
|
"grad_norm": 0.3827580213546753, |
|
"learning_rate": 3.831652708617073e-06, |
|
"loss": 0.379, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.1271336080047085, |
|
"grad_norm": 0.39171484112739563, |
|
"learning_rate": 3.826145687052225e-06, |
|
"loss": 0.3711, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.1318422601530314, |
|
"grad_norm": 0.3718264400959015, |
|
"learning_rate": 3.820629696222096e-06, |
|
"loss": 0.3604, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.1365509123013537, |
|
"grad_norm": 0.5579127669334412, |
|
"learning_rate": 3.815104773433585e-06, |
|
"loss": 0.3893, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.141259564449676, |
|
"grad_norm": 0.4234530031681061, |
|
"learning_rate": 3.809570956054004e-06, |
|
"loss": 0.3794, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.145968216597999, |
|
"grad_norm": 0.3809348940849304, |
|
"learning_rate": 3.8040282815108196e-06, |
|
"loss": 0.3808, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.1506768687463214, |
|
"grad_norm": 0.4055100381374359, |
|
"learning_rate": 3.7984767872914076e-06, |
|
"loss": 0.372, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.1553855208946437, |
|
"grad_norm": 0.36842137575149536, |
|
"learning_rate": 3.79291651094279e-06, |
|
"loss": 0.37, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.1600941730429666, |
|
"grad_norm": 0.35666927695274353, |
|
"learning_rate": 3.78734749007139e-06, |
|
"loss": 0.358, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.164802825191289, |
|
"grad_norm": 0.33630210161209106, |
|
"learning_rate": 3.7817697623427707e-06, |
|
"loss": 0.3551, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.1695114773396114, |
|
"grad_norm": 0.35024064779281616, |
|
"learning_rate": 3.776183365481385e-06, |
|
"loss": 0.3696, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.174220129487934, |
|
"grad_norm": 0.40014365315437317, |
|
"learning_rate": 3.7705883372703177e-06, |
|
"loss": 0.379, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.1789287816362566, |
|
"grad_norm": 0.3819674253463745, |
|
"learning_rate": 3.7649847155510314e-06, |
|
"loss": 0.3687, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.183637433784579, |
|
"grad_norm": 0.3578038215637207, |
|
"learning_rate": 3.7593725382231107e-06, |
|
"loss": 0.3694, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.188346085932902, |
|
"grad_norm": 0.35197731852531433, |
|
"learning_rate": 3.7537518432440034e-06, |
|
"loss": 0.3647, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.193054738081224, |
|
"grad_norm": 0.3819369673728943, |
|
"learning_rate": 3.7481226686287676e-06, |
|
"loss": 0.3761, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.1977633902295466, |
|
"grad_norm": 0.3859822750091553, |
|
"learning_rate": 3.742485052449812e-06, |
|
"loss": 0.3709, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.2024720423778694, |
|
"grad_norm": 0.3550996482372284, |
|
"learning_rate": 3.736839032836638e-06, |
|
"loss": 0.3708, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.207180694526192, |
|
"grad_norm": 0.4063422977924347, |
|
"learning_rate": 3.731184647975585e-06, |
|
"loss": 0.3898, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.211889346674514, |
|
"grad_norm": 0.36760836839675903, |
|
"learning_rate": 3.725521936109567e-06, |
|
"loss": 0.3679, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.216597998822837, |
|
"grad_norm": 0.37566909193992615, |
|
"learning_rate": 3.7198509355378208e-06, |
|
"loss": 0.3659, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.2213066509711594, |
|
"grad_norm": 0.3421824276447296, |
|
"learning_rate": 3.7141716846156406e-06, |
|
"loss": 0.3556, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.226015303119482, |
|
"grad_norm": 0.36609965562820435, |
|
"learning_rate": 3.70848422175412e-06, |
|
"loss": 0.3769, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.2307239552678046, |
|
"grad_norm": 0.3647328019142151, |
|
"learning_rate": 3.702788585419896e-06, |
|
"loss": 0.391, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.235432607416127, |
|
"grad_norm": 0.3609878122806549, |
|
"learning_rate": 3.6970848141348857e-06, |
|
"loss": 0.3516, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.24014125956445, |
|
"grad_norm": 0.36001378297805786, |
|
"learning_rate": 3.6913729464760235e-06, |
|
"loss": 0.3583, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.2448499117127723, |
|
"grad_norm": 0.3540048897266388, |
|
"learning_rate": 3.6856530210750063e-06, |
|
"loss": 0.3841, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.2495585638610947, |
|
"grad_norm": 0.3976086974143982, |
|
"learning_rate": 3.679925076618027e-06, |
|
"loss": 0.3686, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.2542672160094175, |
|
"grad_norm": 0.3643709421157837, |
|
"learning_rate": 3.674189151845515e-06, |
|
"loss": 0.3686, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.25897586815774, |
|
"grad_norm": 0.38158276677131653, |
|
"learning_rate": 3.668445285551874e-06, |
|
"loss": 0.3621, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.2636845203060623, |
|
"grad_norm": 0.3460032343864441, |
|
"learning_rate": 3.662693516585219e-06, |
|
"loss": 0.3837, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.268393172454385, |
|
"grad_norm": 0.3750056326389313, |
|
"learning_rate": 3.6569338838471137e-06, |
|
"loss": 0.3531, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.2731018246027075, |
|
"grad_norm": 0.3516237139701843, |
|
"learning_rate": 3.6511664262923096e-06, |
|
"loss": 0.3937, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.27781047675103, |
|
"grad_norm": 0.3862423300743103, |
|
"learning_rate": 3.645391182928479e-06, |
|
"loss": 0.3679, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.2825191288993527, |
|
"grad_norm": 0.34053516387939453, |
|
"learning_rate": 3.6396081928159514e-06, |
|
"loss": 0.3742, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.287227781047675, |
|
"grad_norm": 0.3533380925655365, |
|
"learning_rate": 3.6338174950674555e-06, |
|
"loss": 0.3806, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.2919364331959975, |
|
"grad_norm": 0.40273189544677734, |
|
"learning_rate": 3.6280191288478437e-06, |
|
"loss": 0.3668, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.2966450853443203, |
|
"grad_norm": 0.35413995385169983, |
|
"learning_rate": 3.622213133373839e-06, |
|
"loss": 0.3591, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.3013537374926427, |
|
"grad_norm": 0.33633658289909363, |
|
"learning_rate": 3.6163995479137605e-06, |
|
"loss": 0.3739, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.306062389640965, |
|
"grad_norm": 0.3628805875778198, |
|
"learning_rate": 3.6105784117872633e-06, |
|
"loss": 0.3657, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.310771041789288, |
|
"grad_norm": 0.34639039635658264, |
|
"learning_rate": 3.604749764365069e-06, |
|
"loss": 0.3684, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.3154796939376103, |
|
"grad_norm": 0.37136930227279663, |
|
"learning_rate": 3.5989136450687035e-06, |
|
"loss": 0.3991, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.3201883460859327, |
|
"grad_norm": 0.4088273048400879, |
|
"learning_rate": 3.593070093370227e-06, |
|
"loss": 0.382, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.3248969982342556, |
|
"grad_norm": 0.39885440468788147, |
|
"learning_rate": 3.5872191487919662e-06, |
|
"loss": 0.3816, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.329605650382578, |
|
"grad_norm": 0.36275333166122437, |
|
"learning_rate": 3.581360850906253e-06, |
|
"loss": 0.3491, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.3343143025309008, |
|
"grad_norm": 0.3472638726234436, |
|
"learning_rate": 3.575495239335148e-06, |
|
"loss": 0.3589, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.339022954679223, |
|
"grad_norm": 0.3820279836654663, |
|
"learning_rate": 3.5696223537501817e-06, |
|
"loss": 0.3826, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.3437316068275456, |
|
"grad_norm": 0.3917175829410553, |
|
"learning_rate": 3.5637422338720785e-06, |
|
"loss": 0.3624, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.3484402589758684, |
|
"grad_norm": 0.3817431628704071, |
|
"learning_rate": 3.5578549194704916e-06, |
|
"loss": 0.3846, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.353148911124191, |
|
"grad_norm": 0.358780175447464, |
|
"learning_rate": 3.5519604503637362e-06, |
|
"loss": 0.3693, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.357857563272513, |
|
"grad_norm": 0.3502993583679199, |
|
"learning_rate": 3.5460588664185138e-06, |
|
"loss": 0.3618, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.362566215420836, |
|
"grad_norm": 0.35879674553871155, |
|
"learning_rate": 3.5401502075496506e-06, |
|
"loss": 0.3847, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.3672748675691584, |
|
"grad_norm": 0.38985323905944824, |
|
"learning_rate": 3.534234513719821e-06, |
|
"loss": 0.3622, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.371983519717481, |
|
"grad_norm": 0.34615063667297363, |
|
"learning_rate": 3.528311824939279e-06, |
|
"loss": 0.3682, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.3766921718658036, |
|
"grad_norm": 0.35256028175354004, |
|
"learning_rate": 3.5223821812655906e-06, |
|
"loss": 0.3681, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.381400824014126, |
|
"grad_norm": 0.3890756666660309, |
|
"learning_rate": 3.5164456228033583e-06, |
|
"loss": 0.356, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.3861094761624484, |
|
"grad_norm": 0.3554323613643646, |
|
"learning_rate": 3.510502189703955e-06, |
|
"loss": 0.3812, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.3908181283107712, |
|
"grad_norm": 0.3922102153301239, |
|
"learning_rate": 3.504551922165246e-06, |
|
"loss": 0.3658, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.3955267804590936, |
|
"grad_norm": 0.3660562336444855, |
|
"learning_rate": 3.4985948604313243e-06, |
|
"loss": 0.3775, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.400235432607416, |
|
"grad_norm": 0.36027809977531433, |
|
"learning_rate": 3.492631044792232e-06, |
|
"loss": 0.3703, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.404944084755739, |
|
"grad_norm": 0.3643735647201538, |
|
"learning_rate": 3.4866605155836913e-06, |
|
"loss": 0.3538, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.4096527369040612, |
|
"grad_norm": 0.36816880106925964, |
|
"learning_rate": 3.480683313186833e-06, |
|
"loss": 0.3819, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.4143613890523836, |
|
"grad_norm": 0.3588556945323944, |
|
"learning_rate": 3.474699478027918e-06, |
|
"loss": 0.3531, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.4190700412007065, |
|
"grad_norm": 0.3689442574977875, |
|
"learning_rate": 3.4687090505780707e-06, |
|
"loss": 0.3831, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.423778693349029, |
|
"grad_norm": 0.37948766350746155, |
|
"learning_rate": 3.4627120713529986e-06, |
|
"loss": 0.3383, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.4284873454973512, |
|
"grad_norm": 0.3453068435192108, |
|
"learning_rate": 3.4567085809127247e-06, |
|
"loss": 0.3733, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.433195997645674, |
|
"grad_norm": 0.3405013382434845, |
|
"learning_rate": 3.450698619861308e-06, |
|
"loss": 0.3748, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.4379046497939965, |
|
"grad_norm": 0.364133358001709, |
|
"learning_rate": 3.444682228846571e-06, |
|
"loss": 0.3774, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.442613301942319, |
|
"grad_norm": 0.3705088198184967, |
|
"learning_rate": 3.4386594485598257e-06, |
|
"loss": 0.3658, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.4473219540906417, |
|
"grad_norm": 0.36289486289024353, |
|
"learning_rate": 3.4326303197355955e-06, |
|
"loss": 0.3869, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.452030606238964, |
|
"grad_norm": 0.40453988313674927, |
|
"learning_rate": 3.426594883151344e-06, |
|
"loss": 0.3551, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.4567392583872865, |
|
"grad_norm": 0.34340184926986694, |
|
"learning_rate": 3.4205531796271953e-06, |
|
"loss": 0.3661, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 2.4614479105356093, |
|
"grad_norm": 0.37485653162002563, |
|
"learning_rate": 3.4145052500256595e-06, |
|
"loss": 0.3637, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 2.4661565626839317, |
|
"grad_norm": 0.3690657913684845, |
|
"learning_rate": 3.4084511352513557e-06, |
|
"loss": 0.3726, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.470865214832254, |
|
"grad_norm": 0.41782814264297485, |
|
"learning_rate": 3.402390876250737e-06, |
|
"loss": 0.3766, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 2.475573866980577, |
|
"grad_norm": 0.3712344169616699, |
|
"learning_rate": 3.3963245140118116e-06, |
|
"loss": 0.3825, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 2.4802825191288993, |
|
"grad_norm": 0.40473493933677673, |
|
"learning_rate": 3.3902520895638674e-06, |
|
"loss": 0.3752, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 2.4849911712772217, |
|
"grad_norm": 0.38602185249328613, |
|
"learning_rate": 3.3841736439771926e-06, |
|
"loss": 0.3885, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 2.4896998234255445, |
|
"grad_norm": 0.40137943625450134, |
|
"learning_rate": 3.3780892183627977e-06, |
|
"loss": 0.3641, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.494408475573867, |
|
"grad_norm": 0.3662640154361725, |
|
"learning_rate": 3.371998853872142e-06, |
|
"loss": 0.3968, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.4991171277221893, |
|
"grad_norm": 0.4132643938064575, |
|
"learning_rate": 3.365902591696848e-06, |
|
"loss": 0.3614, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 2.503825779870512, |
|
"grad_norm": 0.35764506459236145, |
|
"learning_rate": 3.3598004730684287e-06, |
|
"loss": 0.3765, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 2.5085344320188345, |
|
"grad_norm": 0.4405362904071808, |
|
"learning_rate": 3.3536925392580066e-06, |
|
"loss": 0.3579, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 2.513243084167157, |
|
"grad_norm": 0.38784265518188477, |
|
"learning_rate": 3.3475788315760344e-06, |
|
"loss": 0.3801, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.5179517363154797, |
|
"grad_norm": 0.3985704183578491, |
|
"learning_rate": 3.3414593913720163e-06, |
|
"loss": 0.3645, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 2.522660388463802, |
|
"grad_norm": 0.3759413957595825, |
|
"learning_rate": 3.3353342600342263e-06, |
|
"loss": 0.3976, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 2.5273690406121245, |
|
"grad_norm": 0.3630805015563965, |
|
"learning_rate": 3.329203478989431e-06, |
|
"loss": 0.3709, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 2.5320776927604474, |
|
"grad_norm": 0.35673877596855164, |
|
"learning_rate": 3.32306708970261e-06, |
|
"loss": 0.3843, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 2.5367863449087698, |
|
"grad_norm": 0.43144941329956055, |
|
"learning_rate": 3.31692513367667e-06, |
|
"loss": 0.3495, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.541494997057092, |
|
"grad_norm": 0.4056400656700134, |
|
"learning_rate": 3.310777652452171e-06, |
|
"loss": 0.3768, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 2.546203649205415, |
|
"grad_norm": 0.3569755256175995, |
|
"learning_rate": 3.304624687607041e-06, |
|
"loss": 0.372, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 2.5509123013537374, |
|
"grad_norm": 0.3697240352630615, |
|
"learning_rate": 3.298466280756295e-06, |
|
"loss": 0.3829, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 2.55562095350206, |
|
"grad_norm": 0.3861812353134155, |
|
"learning_rate": 3.2923024735517575e-06, |
|
"loss": 0.3752, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 2.5603296056503826, |
|
"grad_norm": 0.40910521149635315, |
|
"learning_rate": 3.286133307681773e-06, |
|
"loss": 0.3814, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.565038257798705, |
|
"grad_norm": 0.3837888538837433, |
|
"learning_rate": 3.2799588248709346e-06, |
|
"loss": 0.3923, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 2.569746909947028, |
|
"grad_norm": 0.39909642934799194, |
|
"learning_rate": 3.2737790668797907e-06, |
|
"loss": 0.3834, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 2.57445556209535, |
|
"grad_norm": 0.3567914664745331, |
|
"learning_rate": 3.2675940755045717e-06, |
|
"loss": 0.354, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 2.5791642142436726, |
|
"grad_norm": 0.3389582633972168, |
|
"learning_rate": 3.261403892576901e-06, |
|
"loss": 0.3763, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.5838728663919954, |
|
"grad_norm": 0.41281071305274963, |
|
"learning_rate": 3.255208559963517e-06, |
|
"loss": 0.3846, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.588581518540318, |
|
"grad_norm": 0.40536680817604065, |
|
"learning_rate": 3.2490081195659858e-06, |
|
"loss": 0.3766, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 2.5932901706886407, |
|
"grad_norm": 0.3592185080051422, |
|
"learning_rate": 3.2428026133204187e-06, |
|
"loss": 0.3508, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 2.597998822836963, |
|
"grad_norm": 0.3820286691188812, |
|
"learning_rate": 3.2365920831971925e-06, |
|
"loss": 0.3753, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 2.6027074749852854, |
|
"grad_norm": 0.36180534958839417, |
|
"learning_rate": 3.230376571200659e-06, |
|
"loss": 0.3611, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 2.6074161271336083, |
|
"grad_norm": 0.38467833399772644, |
|
"learning_rate": 3.2241561193688673e-06, |
|
"loss": 0.3751, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.6121247792819307, |
|
"grad_norm": 0.3356960117816925, |
|
"learning_rate": 3.2179307697732753e-06, |
|
"loss": 0.3549, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 2.616833431430253, |
|
"grad_norm": 0.367064893245697, |
|
"learning_rate": 3.2117005645184668e-06, |
|
"loss": 0.4021, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 2.621542083578576, |
|
"grad_norm": 0.39036962389945984, |
|
"learning_rate": 3.205465545741865e-06, |
|
"loss": 0.3493, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.6262507357268983, |
|
"grad_norm": 0.34206390380859375, |
|
"learning_rate": 3.199225755613452e-06, |
|
"loss": 0.3913, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 2.6309593878752207, |
|
"grad_norm": 0.3853800296783447, |
|
"learning_rate": 3.1929812363354766e-06, |
|
"loss": 0.3516, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.6356680400235435, |
|
"grad_norm": 0.3480147421360016, |
|
"learning_rate": 3.186732030142177e-06, |
|
"loss": 0.3711, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 2.640376692171866, |
|
"grad_norm": 0.3732787072658539, |
|
"learning_rate": 3.180478179299487e-06, |
|
"loss": 0.3781, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 2.6450853443201883, |
|
"grad_norm": 0.3606315851211548, |
|
"learning_rate": 3.174219726104757e-06, |
|
"loss": 0.3601, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 2.649793996468511, |
|
"grad_norm": 0.35798346996307373, |
|
"learning_rate": 3.167956712886463e-06, |
|
"loss": 0.3765, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 2.6545026486168335, |
|
"grad_norm": 0.3613319396972656, |
|
"learning_rate": 3.161689182003924e-06, |
|
"loss": 0.3635, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.659211300765156, |
|
"grad_norm": 0.34538811445236206, |
|
"learning_rate": 3.1554171758470114e-06, |
|
"loss": 0.3575, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 2.6639199529134787, |
|
"grad_norm": 0.3859974443912506, |
|
"learning_rate": 3.1491407368358682e-06, |
|
"loss": 0.3805, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.668628605061801, |
|
"grad_norm": 0.364109605550766, |
|
"learning_rate": 3.142859907420615e-06, |
|
"loss": 0.3682, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 2.6733372572101235, |
|
"grad_norm": 0.36927446722984314, |
|
"learning_rate": 3.1365747300810693e-06, |
|
"loss": 0.3737, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 2.6780459093584463, |
|
"grad_norm": 0.38519367575645447, |
|
"learning_rate": 3.130285247326454e-06, |
|
"loss": 0.3788, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.6827545615067687, |
|
"grad_norm": 0.36593741178512573, |
|
"learning_rate": 3.12399150169511e-06, |
|
"loss": 0.3744, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 2.687463213655091, |
|
"grad_norm": 0.3665541112422943, |
|
"learning_rate": 3.1176935357542136e-06, |
|
"loss": 0.3641, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 2.692171865803414, |
|
"grad_norm": 0.35215452313423157, |
|
"learning_rate": 3.1113913920994803e-06, |
|
"loss": 0.3428, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 2.6968805179517363, |
|
"grad_norm": 0.3475217819213867, |
|
"learning_rate": 3.1050851133548854e-06, |
|
"loss": 0.3541, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 2.7015891701000587, |
|
"grad_norm": 0.35495033860206604, |
|
"learning_rate": 3.0987747421723678e-06, |
|
"loss": 0.3647, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.7062978222483816, |
|
"grad_norm": 0.3700904846191406, |
|
"learning_rate": 3.0924603212315475e-06, |
|
"loss": 0.3758, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.711006474396704, |
|
"grad_norm": 0.3657042682170868, |
|
"learning_rate": 3.0861418932394344e-06, |
|
"loss": 0.3644, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 2.7157151265450263, |
|
"grad_norm": 0.350494384765625, |
|
"learning_rate": 3.0798195009301385e-06, |
|
"loss": 0.3724, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 2.720423778693349, |
|
"grad_norm": 0.3735278248786926, |
|
"learning_rate": 3.0734931870645844e-06, |
|
"loss": 0.3653, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 2.7251324308416716, |
|
"grad_norm": 0.35524168610572815, |
|
"learning_rate": 3.0671629944302168e-06, |
|
"loss": 0.3526, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.729841082989994, |
|
"grad_norm": 0.35376468300819397, |
|
"learning_rate": 3.0608289658407165e-06, |
|
"loss": 0.3601, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 2.734549735138317, |
|
"grad_norm": 0.3474513292312622, |
|
"learning_rate": 3.054491144135707e-06, |
|
"loss": 0.3796, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 2.739258387286639, |
|
"grad_norm": 0.3583899438381195, |
|
"learning_rate": 3.0481495721804683e-06, |
|
"loss": 0.3577, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 2.7439670394349616, |
|
"grad_norm": 0.34981340169906616, |
|
"learning_rate": 3.0418042928656416e-06, |
|
"loss": 0.3727, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 2.7486756915832844, |
|
"grad_norm": 0.34836986660957336, |
|
"learning_rate": 3.035455349106945e-06, |
|
"loss": 0.3737, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.753384343731607, |
|
"grad_norm": 0.3657343089580536, |
|
"learning_rate": 3.0291027838448793e-06, |
|
"loss": 0.3694, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 2.758092995879929, |
|
"grad_norm": 0.36264339089393616, |
|
"learning_rate": 3.022746640044441e-06, |
|
"loss": 0.3314, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 2.762801648028252, |
|
"grad_norm": 0.32704487442970276, |
|
"learning_rate": 3.0163869606948276e-06, |
|
"loss": 0.3826, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 2.7675103001765744, |
|
"grad_norm": 0.37806132435798645, |
|
"learning_rate": 3.0100237888091493e-06, |
|
"loss": 0.3816, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 2.772218952324897, |
|
"grad_norm": 0.3723447620868683, |
|
"learning_rate": 3.0036571674241393e-06, |
|
"loss": 0.3776, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.7769276044732196, |
|
"grad_norm": 0.3555864691734314, |
|
"learning_rate": 2.997287139599859e-06, |
|
"loss": 0.3956, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 2.781636256621542, |
|
"grad_norm": 0.3734078109264374, |
|
"learning_rate": 2.9909137484194113e-06, |
|
"loss": 0.3913, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 2.7863449087698644, |
|
"grad_norm": 0.3938598930835724, |
|
"learning_rate": 2.984537036988644e-06, |
|
"loss": 0.377, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 2.7910535609181872, |
|
"grad_norm": 0.4086543321609497, |
|
"learning_rate": 2.9781570484358634e-06, |
|
"loss": 0.3846, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 2.7957622130665096, |
|
"grad_norm": 0.3646213710308075, |
|
"learning_rate": 2.9717738259115403e-06, |
|
"loss": 0.3559, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 2.800470865214832, |
|
"grad_norm": 0.35105398297309875, |
|
"learning_rate": 2.965387412588017e-06, |
|
"loss": 0.3567, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 2.805179517363155, |
|
"grad_norm": 0.36393171548843384, |
|
"learning_rate": 2.9589978516592164e-06, |
|
"loss": 0.3653, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 2.8098881695114772, |
|
"grad_norm": 0.34818121790885925, |
|
"learning_rate": 2.952605186340352e-06, |
|
"loss": 0.3786, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 2.8145968216597996, |
|
"grad_norm": 0.4391355812549591, |
|
"learning_rate": 2.9462094598676298e-06, |
|
"loss": 0.3702, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 2.8193054738081225, |
|
"grad_norm": 0.37328672409057617, |
|
"learning_rate": 2.9398107154979637e-06, |
|
"loss": 0.3575, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 2.824014125956445, |
|
"grad_norm": 0.3919398784637451, |
|
"learning_rate": 2.9334089965086765e-06, |
|
"loss": 0.3779, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 2.8287227781047672, |
|
"grad_norm": 0.3888506293296814, |
|
"learning_rate": 2.9270043461972104e-06, |
|
"loss": 0.3684, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 2.83343143025309, |
|
"grad_norm": 0.3589399456977844, |
|
"learning_rate": 2.920596807880832e-06, |
|
"loss": 0.3716, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 2.8381400824014125, |
|
"grad_norm": 0.37757810950279236, |
|
"learning_rate": 2.9141864248963433e-06, |
|
"loss": 0.3924, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 2.8428487345497353, |
|
"grad_norm": 0.37059202790260315, |
|
"learning_rate": 2.9077732405997834e-06, |
|
"loss": 0.3805, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 2.8475573866980577, |
|
"grad_norm": 0.38783740997314453, |
|
"learning_rate": 2.9013572983661382e-06, |
|
"loss": 0.3586, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 2.85226603884638, |
|
"grad_norm": 0.3548484146595001, |
|
"learning_rate": 2.8949386415890486e-06, |
|
"loss": 0.3559, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 2.856974690994703, |
|
"grad_norm": 0.36312374472618103, |
|
"learning_rate": 2.8885173136805126e-06, |
|
"loss": 0.3662, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 2.8616833431430253, |
|
"grad_norm": 0.3536983132362366, |
|
"learning_rate": 2.882093358070597e-06, |
|
"loss": 0.3674, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 2.8663919952913477, |
|
"grad_norm": 0.3506225347518921, |
|
"learning_rate": 2.8756668182071364e-06, |
|
"loss": 0.3916, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 2.8711006474396705, |
|
"grad_norm": 0.37823688983917236, |
|
"learning_rate": 2.869237737555448e-06, |
|
"loss": 0.3695, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 2.875809299587993, |
|
"grad_norm": 0.3676494061946869, |
|
"learning_rate": 2.8628061595980323e-06, |
|
"loss": 0.376, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 2.8805179517363158, |
|
"grad_norm": 0.3870524764060974, |
|
"learning_rate": 2.856372127834279e-06, |
|
"loss": 0.3725, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 2.885226603884638, |
|
"grad_norm": 0.4425090253353119, |
|
"learning_rate": 2.8499356857801747e-06, |
|
"loss": 0.3688, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 2.8899352560329605, |
|
"grad_norm": 0.3530326187610626, |
|
"learning_rate": 2.843496876968007e-06, |
|
"loss": 0.3619, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 2.8946439081812834, |
|
"grad_norm": 0.3702718913555145, |
|
"learning_rate": 2.837055744946072e-06, |
|
"loss": 0.3791, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 2.8993525603296058, |
|
"grad_norm": 0.37180230021476746, |
|
"learning_rate": 2.8306123332783793e-06, |
|
"loss": 0.3692, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 2.904061212477928, |
|
"grad_norm": 0.3533918559551239, |
|
"learning_rate": 2.8241666855443527e-06, |
|
"loss": 0.3567, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 2.908769864626251, |
|
"grad_norm": 0.3372352719306946, |
|
"learning_rate": 2.8177188453385445e-06, |
|
"loss": 0.3761, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 2.9134785167745734, |
|
"grad_norm": 0.3824419379234314, |
|
"learning_rate": 2.811268856270332e-06, |
|
"loss": 0.3925, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 2.9181871689228958, |
|
"grad_norm": 0.36836275458335876, |
|
"learning_rate": 2.8048167619636287e-06, |
|
"loss": 0.3481, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 2.9228958210712186, |
|
"grad_norm": 0.343496173620224, |
|
"learning_rate": 2.7983626060565833e-06, |
|
"loss": 0.3725, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 2.927604473219541, |
|
"grad_norm": 0.3810299336910248, |
|
"learning_rate": 2.7919064322012922e-06, |
|
"loss": 0.347, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 2.9323131253678634, |
|
"grad_norm": 0.3711015284061432, |
|
"learning_rate": 2.785448284063497e-06, |
|
"loss": 0.3632, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.937021777516186, |
|
"grad_norm": 0.3700212836265564, |
|
"learning_rate": 2.778988205322292e-06, |
|
"loss": 0.3812, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 2.9417304296645086, |
|
"grad_norm": 0.44081220030784607, |
|
"learning_rate": 2.772526239669831e-06, |
|
"loss": 0.3225, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.946439081812831, |
|
"grad_norm": 0.314218670129776, |
|
"learning_rate": 2.7660624308110293e-06, |
|
"loss": 0.3503, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 2.951147733961154, |
|
"grad_norm": 0.35966551303863525, |
|
"learning_rate": 2.7595968224632672e-06, |
|
"loss": 0.4044, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.955856386109476, |
|
"grad_norm": 0.4296911656856537, |
|
"learning_rate": 2.7531294583560968e-06, |
|
"loss": 0.3862, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 2.9605650382577986, |
|
"grad_norm": 0.3642086088657379, |
|
"learning_rate": 2.746660382230944e-06, |
|
"loss": 0.3549, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.9652736904061214, |
|
"grad_norm": 0.3510330021381378, |
|
"learning_rate": 2.740189637840816e-06, |
|
"loss": 0.3491, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 2.969982342554444, |
|
"grad_norm": 0.34799647331237793, |
|
"learning_rate": 2.73371726895e-06, |
|
"loss": 0.3785, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.974690994702766, |
|
"grad_norm": 0.3923639953136444, |
|
"learning_rate": 2.727243319333774e-06, |
|
"loss": 0.3686, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 2.979399646851089, |
|
"grad_norm": 0.3745047152042389, |
|
"learning_rate": 2.720767832778104e-06, |
|
"loss": 0.3814, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.9841082989994114, |
|
"grad_norm": 0.3802978992462158, |
|
"learning_rate": 2.7142908530793523e-06, |
|
"loss": 0.3692, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 2.988816951147734, |
|
"grad_norm": 0.38678938150405884, |
|
"learning_rate": 2.7078124240439795e-06, |
|
"loss": 0.3703, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.9935256032960567, |
|
"grad_norm": 0.3405422866344452, |
|
"learning_rate": 2.701332589488249e-06, |
|
"loss": 0.3778, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 2.998234255444379, |
|
"grad_norm": 0.36346471309661865, |
|
"learning_rate": 2.694851393237931e-06, |
|
"loss": 0.3601, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.5746592879295349, |
|
"learning_rate": 2.688368879128004e-06, |
|
"loss": 0.3771, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 3.0047086521483224, |
|
"grad_norm": 0.49112632870674133, |
|
"learning_rate": 2.68188509100236e-06, |
|
"loss": 0.332, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.0094173042966452, |
|
"grad_norm": 0.3928791284561157, |
|
"learning_rate": 2.675400072713508e-06, |
|
"loss": 0.3545, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 3.0141259564449676, |
|
"grad_norm": 0.36599087715148926, |
|
"learning_rate": 2.6689138681222793e-06, |
|
"loss": 0.3242, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 3.01883460859329, |
|
"grad_norm": 0.3617115914821625, |
|
"learning_rate": 2.6624265210975232e-06, |
|
"loss": 0.3175, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 3.023543260741613, |
|
"grad_norm": 0.39354369044303894, |
|
"learning_rate": 2.655938075515821e-06, |
|
"loss": 0.3444, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 3.0282519128899352, |
|
"grad_norm": 0.40006500482559204, |
|
"learning_rate": 2.649448575261182e-06, |
|
"loss": 0.3396, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.0329605650382576, |
|
"grad_norm": 0.37696173787117004, |
|
"learning_rate": 2.6429580642247478e-06, |
|
"loss": 0.3492, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 3.0376692171865805, |
|
"grad_norm": 0.4124061167240143, |
|
"learning_rate": 2.6364665863044984e-06, |
|
"loss": 0.3512, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 3.042377869334903, |
|
"grad_norm": 0.3561529219150543, |
|
"learning_rate": 2.629974185404951e-06, |
|
"loss": 0.3312, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 3.0470865214832252, |
|
"grad_norm": 0.4442432224750519, |
|
"learning_rate": 2.623480905436867e-06, |
|
"loss": 0.3456, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 3.051795173631548, |
|
"grad_norm": 0.359831839799881, |
|
"learning_rate": 2.6169867903169528e-06, |
|
"loss": 0.3253, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.0565038257798705, |
|
"grad_norm": 0.424878865480423, |
|
"learning_rate": 2.610491883967562e-06, |
|
"loss": 0.3518, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 3.061212477928193, |
|
"grad_norm": 0.37126424908638, |
|
"learning_rate": 2.6039962303164024e-06, |
|
"loss": 0.3327, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 3.0659211300765157, |
|
"grad_norm": 0.394135445356369, |
|
"learning_rate": 2.5974998732962324e-06, |
|
"loss": 0.328, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 3.070629782224838, |
|
"grad_norm": 0.3592108488082886, |
|
"learning_rate": 2.5910028568445724e-06, |
|
"loss": 0.3294, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 3.075338434373161, |
|
"grad_norm": 0.3625464141368866, |
|
"learning_rate": 2.584505224903397e-06, |
|
"loss": 0.3415, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.0800470865214833, |
|
"grad_norm": 0.3701664209365845, |
|
"learning_rate": 2.578007021418848e-06, |
|
"loss": 0.3416, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 3.0847557386698057, |
|
"grad_norm": 0.35726526379585266, |
|
"learning_rate": 2.571508290340932e-06, |
|
"loss": 0.3283, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 3.0894643908181285, |
|
"grad_norm": 0.35142824053764343, |
|
"learning_rate": 2.5650090756232227e-06, |
|
"loss": 0.3253, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 3.094173042966451, |
|
"grad_norm": 0.3839946985244751, |
|
"learning_rate": 2.5585094212225646e-06, |
|
"loss": 0.3233, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 3.0988816951147733, |
|
"grad_norm": 0.3479538857936859, |
|
"learning_rate": 2.5520093710987785e-06, |
|
"loss": 0.3306, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.103590347263096, |
|
"grad_norm": 0.36447158455848694, |
|
"learning_rate": 2.5455089692143593e-06, |
|
"loss": 0.3431, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 3.1082989994114185, |
|
"grad_norm": 0.3611982762813568, |
|
"learning_rate": 2.539008259534182e-06, |
|
"loss": 0.3345, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 3.113007651559741, |
|
"grad_norm": 0.35439133644104004, |
|
"learning_rate": 2.5325072860252016e-06, |
|
"loss": 0.3425, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 3.1177163037080637, |
|
"grad_norm": 0.3768078684806824, |
|
"learning_rate": 2.526006092656161e-06, |
|
"loss": 0.3444, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 3.122424955856386, |
|
"grad_norm": 0.38114500045776367, |
|
"learning_rate": 2.5195047233972856e-06, |
|
"loss": 0.349, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.1271336080047085, |
|
"grad_norm": 0.4064490497112274, |
|
"learning_rate": 2.5130032222199956e-06, |
|
"loss": 0.3698, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 3.1318422601530314, |
|
"grad_norm": 0.35883423686027527, |
|
"learning_rate": 2.5065016330965985e-06, |
|
"loss": 0.3194, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 3.1365509123013537, |
|
"grad_norm": 0.35054928064346313, |
|
"learning_rate": 2.5e-06, |
|
"loss": 0.328, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 3.141259564449676, |
|
"grad_norm": 0.34279653429985046, |
|
"learning_rate": 2.493498366903402e-06, |
|
"loss": 0.3407, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 3.145968216597999, |
|
"grad_norm": 0.37415286898612976, |
|
"learning_rate": 2.4869967777800057e-06, |
|
"loss": 0.3459, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.1506768687463214, |
|
"grad_norm": 0.3572427034378052, |
|
"learning_rate": 2.480495276602714e-06, |
|
"loss": 0.3294, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 3.1553855208946437, |
|
"grad_norm": 0.3583246171474457, |
|
"learning_rate": 2.47399390734384e-06, |
|
"loss": 0.3342, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 3.1600941730429666, |
|
"grad_norm": 0.35233941674232483, |
|
"learning_rate": 2.467492713974799e-06, |
|
"loss": 0.3351, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 3.164802825191289, |
|
"grad_norm": 0.36822763085365295, |
|
"learning_rate": 2.4609917404658198e-06, |
|
"loss": 0.3422, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 3.1695114773396114, |
|
"grad_norm": 0.35386916995048523, |
|
"learning_rate": 2.4544910307856415e-06, |
|
"loss": 0.3372, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.174220129487934, |
|
"grad_norm": 0.3638751208782196, |
|
"learning_rate": 2.447990628901222e-06, |
|
"loss": 0.3344, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 3.1789287816362566, |
|
"grad_norm": 0.3515467047691345, |
|
"learning_rate": 2.441490578777436e-06, |
|
"loss": 0.3302, |
|
"step": 677 |
|
}, |
|
{ "epoch": 3.183637433784579, "grad_norm": 0.3626198172569275, "learning_rate": 2.434990924376778e-06, "loss": 0.3488, "step": 678 },
{ "epoch": 3.188346085932902, "grad_norm": 0.3596525490283966, "learning_rate": 2.428491709659069e-06, "loss": 0.3231, "step": 679 },
{ "epoch": 3.193054738081224, "grad_norm": 0.37080711126327515, "learning_rate": 2.421992978581152e-06, "loss": 0.3353, "step": 680 },
{ "epoch": 3.1977633902295466, "grad_norm": 0.3591707944869995, "learning_rate": 2.4154947750966035e-06, "loss": 0.34, "step": 681 },
{ "epoch": 3.2024720423778694, "grad_norm": 0.3675726354122162, "learning_rate": 2.408997143155429e-06, "loss": 0.3358, "step": 682 },
{ "epoch": 3.207180694526192, "grad_norm": 0.3385520875453949, "learning_rate": 2.402500126703768e-06, "loss": 0.3151, "step": 683 },
{ "epoch": 3.211889346674514, "grad_norm": 0.38244402408599854, "learning_rate": 2.396003769683599e-06, "loss": 0.3489, "step": 684 },
{ "epoch": 3.216597998822837, "grad_norm": 0.35723546147346497, "learning_rate": 2.389508116032438e-06, "loss": 0.3407, "step": 685 },
{ "epoch": 3.2213066509711594, "grad_norm": 0.3416253328323364, "learning_rate": 2.3830132096830476e-06, "loss": 0.3124, "step": 686 },
{ "epoch": 3.226015303119482, "grad_norm": 0.3942345678806305, "learning_rate": 2.376519094563134e-06, "loss": 0.3166, "step": 687 },
{ "epoch": 3.2307239552678046, "grad_norm": 0.35811132192611694, "learning_rate": 2.3700258145950495e-06, "loss": 0.3317, "step": 688 },
{ "epoch": 3.235432607416127, "grad_norm": 0.35040491819381714, "learning_rate": 2.363533413695503e-06, "loss": 0.318, "step": 689 },
{ "epoch": 3.24014125956445, "grad_norm": 0.35471948981285095, "learning_rate": 2.3570419357752522e-06, "loss": 0.3278, "step": 690 },
{ "epoch": 3.2448499117127723, "grad_norm": 0.35799640417099, "learning_rate": 2.3505514247388185e-06, "loss": 0.3304, "step": 691 },
{ "epoch": 3.2495585638610947, "grad_norm": 0.3335202634334564, "learning_rate": 2.3440619244841798e-06, "loss": 0.3171, "step": 692 },
{ "epoch": 3.2542672160094175, "grad_norm": 0.42852750420570374, "learning_rate": 2.3375734789024776e-06, "loss": 0.332, "step": 693 },
{ "epoch": 3.25897586815774, "grad_norm": 0.37598463892936707, "learning_rate": 2.3310861318777216e-06, "loss": 0.3563, "step": 694 },
{ "epoch": 3.2636845203060623, "grad_norm": 0.3953750729560852, "learning_rate": 2.3245999272864924e-06, "loss": 0.3267, "step": 695 },
{ "epoch": 3.268393172454385, "grad_norm": 0.3668225407600403, "learning_rate": 2.3181149089976407e-06, "loss": 0.3204, "step": 696 },
{ "epoch": 3.2731018246027075, "grad_norm": 0.3694913387298584, "learning_rate": 2.311631120871997e-06, "loss": 0.3472, "step": 697 },
{ "epoch": 3.27781047675103, "grad_norm": 0.35975244641304016, "learning_rate": 2.30514860676207e-06, "loss": 0.339, "step": 698 },
{ "epoch": 3.2825191288993527, "grad_norm": 0.3759594261646271, "learning_rate": 2.298667410511751e-06, "loss": 0.3412, "step": 699 },
{ "epoch": 3.287227781047675, "grad_norm": 0.37436795234680176, "learning_rate": 2.2921875759560213e-06, "loss": 0.3279, "step": 700 },
{ "epoch": 3.2919364331959975, "grad_norm": 0.3595412075519562, "learning_rate": 2.2857091469206485e-06, "loss": 0.3427, "step": 701 },
{ "epoch": 3.2966450853443203, "grad_norm": 0.3599635660648346, "learning_rate": 2.279232167221897e-06, "loss": 0.3353, "step": 702 },
{ "epoch": 3.3013537374926427, "grad_norm": 0.35342299938201904, "learning_rate": 2.272756680666227e-06, "loss": 0.3212, "step": 703 },
{ "epoch": 3.306062389640965, "grad_norm": 0.37875843048095703, "learning_rate": 2.2662827310499995e-06, "loss": 0.3215, "step": 704 },
{ "epoch": 3.310771041789288, "grad_norm": 0.3415927290916443, "learning_rate": 2.2598103621591846e-06, "loss": 0.3317, "step": 705 },
{ "epoch": 3.3154796939376103, "grad_norm": 0.38792863488197327, "learning_rate": 2.2533396177690565e-06, "loss": 0.3326, "step": 706 },
{ "epoch": 3.3201883460859327, "grad_norm": 0.3674851655960083, "learning_rate": 2.2468705416439045e-06, "loss": 0.3306, "step": 707 },
{ "epoch": 3.3248969982342556, "grad_norm": 0.3733600974082947, "learning_rate": 2.2404031775367336e-06, "loss": 0.3438, "step": 708 },
{ "epoch": 3.329605650382578, "grad_norm": 0.37565818428993225, "learning_rate": 2.233937569188971e-06, "loss": 0.3412, "step": 709 },
{ "epoch": 3.3343143025309008, "grad_norm": 0.3375566303730011, "learning_rate": 2.2274737603301696e-06, "loss": 0.32, "step": 710 },
{ "epoch": 3.339022954679223, "grad_norm": 0.3649482727050781, "learning_rate": 2.2210117946777087e-06, "loss": 0.3354, "step": 711 },
{ "epoch": 3.3437316068275456, "grad_norm": 0.3563418686389923, "learning_rate": 2.2145517159365044e-06, "loss": 0.3369, "step": 712 },
{ "epoch": 3.3484402589758684, "grad_norm": 0.377107173204422, "learning_rate": 2.2080935677987086e-06, "loss": 0.3392, "step": 713 },
{ "epoch": 3.353148911124191, "grad_norm": 0.4007817804813385, "learning_rate": 2.2016373939434166e-06, "loss": 0.3316, "step": 714 },
{ "epoch": 3.357857563272513, "grad_norm": 0.359765887260437, "learning_rate": 2.1951832380363726e-06, "loss": 0.3307, "step": 715 },
{ "epoch": 3.362566215420836, "grad_norm": 0.33300676941871643, "learning_rate": 2.188731143729669e-06, "loss": 0.32, "step": 716 },
{ "epoch": 3.3672748675691584, "grad_norm": 0.36447378993034363, "learning_rate": 2.182281154661457e-06, "loss": 0.3478, "step": 717 },
{ "epoch": 3.371983519717481, "grad_norm": 0.33058932423591614, "learning_rate": 2.1758333144556473e-06, "loss": 0.2869, "step": 718 },
{ "epoch": 3.3766921718658036, "grad_norm": 0.363322913646698, "learning_rate": 2.1693876667216215e-06, "loss": 0.3304, "step": 719 },
{ "epoch": 3.381400824014126, "grad_norm": 0.34668809175491333, "learning_rate": 2.1629442550539283e-06, "loss": 0.3273, "step": 720 },
{ "epoch": 3.3861094761624484, "grad_norm": 0.3586620092391968, "learning_rate": 2.1565031230319937e-06, "loss": 0.3431, "step": 721 },
{ "epoch": 3.3908181283107712, "grad_norm": 0.33611762523651123, "learning_rate": 2.1500643142198265e-06, "loss": 0.3263, "step": 722 },
{ "epoch": 3.3955267804590936, "grad_norm": 0.3559138774871826, "learning_rate": 2.1436278721657216e-06, "loss": 0.3299, "step": 723 },
{ "epoch": 3.400235432607416, "grad_norm": 0.38343891501426697, "learning_rate": 2.137193840401968e-06, "loss": 0.3226, "step": 724 },
{ "epoch": 3.404944084755739, "grad_norm": 0.3612920045852661, "learning_rate": 2.130762262444553e-06, "loss": 0.3481, "step": 725 },
{ "epoch": 3.4096527369040612, "grad_norm": 0.34701016545295715, "learning_rate": 2.1243331817928645e-06, "loss": 0.3279, "step": 726 },
{ "epoch": 3.4143613890523836, "grad_norm": 0.32958680391311646, "learning_rate": 2.1179066419294043e-06, "loss": 0.3205, "step": 727 },
{ "epoch": 3.4190700412007065, "grad_norm": 0.37069225311279297, "learning_rate": 2.1114826863194882e-06, "loss": 0.338, "step": 728 },
{ "epoch": 3.423778693349029, "grad_norm": 0.3406302034854889, "learning_rate": 2.1050613584109518e-06, "loss": 0.3331, "step": 729 },
{ "epoch": 3.4284873454973512, "grad_norm": 0.32335618138313293, "learning_rate": 2.0986427016338626e-06, "loss": 0.3124, "step": 730 },
{ "epoch": 3.433195997645674, "grad_norm": 0.35719034075737, "learning_rate": 2.0922267594002182e-06, "loss": 0.3433, "step": 731 },
{ "epoch": 3.4379046497939965, "grad_norm": 0.3510840833187103, "learning_rate": 2.085813575103657e-06, "loss": 0.3244, "step": 732 },
{ "epoch": 3.442613301942319, "grad_norm": 0.3347994387149811, "learning_rate": 2.0794031921191683e-06, "loss": 0.3289, "step": 733 },
{ "epoch": 3.4473219540906417, "grad_norm": 0.3473336696624756, "learning_rate": 2.072995653802791e-06, "loss": 0.3251, "step": 734 },
{ "epoch": 3.452030606238964, "grad_norm": 0.3559500277042389, "learning_rate": 2.0665910034913243e-06, "loss": 0.3256, "step": 735 },
{ "epoch": 3.4567392583872865, "grad_norm": 0.3657936155796051, "learning_rate": 2.060189284502037e-06, "loss": 0.3485, "step": 736 },
{ "epoch": 3.4614479105356093, "grad_norm": 0.346377968788147, "learning_rate": 2.05379054013237e-06, "loss": 0.3246, "step": 737 },
{ "epoch": 3.4661565626839317, "grad_norm": 0.3639799654483795, "learning_rate": 2.0473948136596487e-06, "loss": 0.3281, "step": 738 },
{ "epoch": 3.470865214832254, "grad_norm": 0.35108593106269836, "learning_rate": 2.041002148340784e-06, "loss": 0.3182, "step": 739 },
{ "epoch": 3.475573866980577, "grad_norm": 0.3662627637386322, "learning_rate": 2.034612587411984e-06, "loss": 0.3391, "step": 740 },
{ "epoch": 3.4802825191288993, "grad_norm": 0.37259167432785034, "learning_rate": 2.02822617408846e-06, "loss": 0.3422, "step": 741 },
{ "epoch": 3.4849911712772217, "grad_norm": 0.3728717267513275, "learning_rate": 2.021842951564137e-06, "loss": 0.3438, "step": 742 },
{ "epoch": 3.4896998234255445, "grad_norm": 0.36524805426597595, "learning_rate": 2.015462963011357e-06, "loss": 0.3286, "step": 743 },
{ "epoch": 3.494408475573867, "grad_norm": 0.3473794758319855, "learning_rate": 2.0090862515805896e-06, "loss": 0.3347, "step": 744 },
{ "epoch": 3.4991171277221893, "grad_norm": 0.35428687930107117, "learning_rate": 2.0027128604001417e-06, "loss": 0.3229, "step": 745 },
{ "epoch": 3.503825779870512, "grad_norm": 0.3526255190372467, "learning_rate": 1.9963428325758615e-06, "loss": 0.3407, "step": 746 },
{ "epoch": 3.5085344320188345, "grad_norm": 0.4010574519634247, "learning_rate": 1.989976211190851e-06, "loss": 0.3357, "step": 747 },
{ "epoch": 3.513243084167157, "grad_norm": 0.3535029888153076, "learning_rate": 1.983613039305173e-06, "loss": 0.3479, "step": 748 },
{ "epoch": 3.5179517363154797, "grad_norm": 0.36888188123703003, "learning_rate": 1.97725335995556e-06, "loss": 0.3457, "step": 749 },
{ "epoch": 3.522660388463802, "grad_norm": 0.35986262559890747, "learning_rate": 1.9708972161551215e-06, "loss": 0.3304, "step": 750 },
{ "epoch": 3.5273690406121245, "grad_norm": 0.35900959372520447, "learning_rate": 1.9645446508930556e-06, "loss": 0.3415, "step": 751 },
{ "epoch": 3.5320776927604474, "grad_norm": 0.39998960494995117, "learning_rate": 1.9581957071343593e-06, "loss": 0.3662, "step": 752 },
{ "epoch": 3.5367863449087698, "grad_norm": 0.3455185890197754, "learning_rate": 1.951850427819533e-06, "loss": 0.3445, "step": 753 },
{ "epoch": 3.541494997057092, "grad_norm": 0.35920363664627075, "learning_rate": 1.9455088558642932e-06, "loss": 0.3572, "step": 754 },
{ "epoch": 3.546203649205415, "grad_norm": 0.3520212471485138, "learning_rate": 1.9391710341592847e-06, "loss": 0.3413, "step": 755 },
{ "epoch": 3.5509123013537374, "grad_norm": 0.36848849058151245, "learning_rate": 1.9328370055697836e-06, "loss": 0.3304, "step": 756 },
{ "epoch": 3.55562095350206, "grad_norm": 0.3467768132686615, "learning_rate": 1.9265068129354164e-06, "loss": 0.3133, "step": 757 },
{ "epoch": 3.5603296056503826, "grad_norm": 0.38214123249053955, "learning_rate": 1.920180499069862e-06, "loss": 0.353, "step": 758 },
{ "epoch": 3.565038257798705, "grad_norm": 0.33890092372894287, "learning_rate": 1.9138581067605664e-06, "loss": 0.3144, "step": 759 },
{ "epoch": 3.569746909947028, "grad_norm": 0.36359307169914246, "learning_rate": 1.9075396787684534e-06, "loss": 0.3176, "step": 760 },
{ "epoch": 3.57445556209535, "grad_norm": 0.3256242871284485, "learning_rate": 1.901225257827633e-06, "loss": 0.3186, "step": 761 },
{ "epoch": 3.5791642142436726, "grad_norm": 0.33996862173080444, "learning_rate": 1.8949148866451154e-06, "loss": 0.329, "step": 762 },
{ "epoch": 3.5838728663919954, "grad_norm": 0.38446974754333496, "learning_rate": 1.8886086079005201e-06, "loss": 0.3502, "step": 763 },
{ "epoch": 3.588581518540318, "grad_norm": 0.33746618032455444, "learning_rate": 1.8823064642457877e-06, "loss": 0.3172, "step": 764 },
{ "epoch": 3.5932901706886407, "grad_norm": 0.3711576759815216, "learning_rate": 1.8760084983048898e-06, "loss": 0.3489, "step": 765 },
{ "epoch": 3.597998822836963, "grad_norm": 0.36693471670150757, "learning_rate": 1.8697147526735468e-06, "loss": 0.3418, "step": 766 },
{ "epoch": 3.6027074749852854, "grad_norm": 0.40748727321624756, "learning_rate": 1.8634252699189311e-06, "loss": 0.3443, "step": 767 },
{ "epoch": 3.6074161271336083, "grad_norm": 0.3639236092567444, "learning_rate": 1.8571400925793855e-06, "loss": 0.3396, "step": 768 },
{ "epoch": 3.6121247792819307, "grad_norm": 0.35893839597702026, "learning_rate": 1.8508592631641328e-06, "loss": 0.3369, "step": 769 },
{ "epoch": 3.616833431430253, "grad_norm": 0.3774881064891815, "learning_rate": 1.8445828241529884e-06, "loss": 0.3311, "step": 770 },
{ "epoch": 3.621542083578576, "grad_norm": 0.3896530866622925, "learning_rate": 1.8383108179960769e-06, "loss": 0.337, "step": 771 },
{ "epoch": 3.6262507357268983, "grad_norm": 0.365604966878891, "learning_rate": 1.8320432871135378e-06, "loss": 0.3357, "step": 772 },
{ "epoch": 3.6309593878752207, "grad_norm": 0.7592654228210449, "learning_rate": 1.825780273895244e-06, "loss": 0.3632, "step": 773 },
{ "epoch": 3.6356680400235435, "grad_norm": 0.4070909023284912, "learning_rate": 1.8195218207005139e-06, "loss": 0.3431, "step": 774 },
{ "epoch": 3.640376692171866, "grad_norm": 0.3568059206008911, "learning_rate": 1.8132679698578238e-06, "loss": 0.3218, "step": 775 },
{ "epoch": 3.6450853443201883, "grad_norm": 0.3874458074569702, "learning_rate": 1.807018763664524e-06, "loss": 0.3487, "step": 776 },
{ "epoch": 3.649793996468511, "grad_norm": 0.6760424971580505, "learning_rate": 1.800774244386549e-06, "loss": 0.3396, "step": 777 },
{ "epoch": 3.6545026486168335, "grad_norm": 0.3860941529273987, "learning_rate": 1.7945344542581356e-06, "loss": 0.3687, "step": 778 },
{ "epoch": 3.659211300765156, "grad_norm": 0.399251252412796, "learning_rate": 1.7882994354815343e-06, "loss": 0.3452, "step": 779 },
{ "epoch": 3.6639199529134787, "grad_norm": 0.3464195430278778, "learning_rate": 1.7820692302267251e-06, "loss": 0.3219, "step": 780 },
{ "epoch": 3.668628605061801, "grad_norm": 0.33919650316238403, "learning_rate": 1.7758438806311329e-06, "loss": 0.3099, "step": 781 },
{ "epoch": 3.6733372572101235, "grad_norm": 0.35061123967170715, "learning_rate": 1.7696234287993416e-06, "loss": 0.3229, "step": 782 },
{ "epoch": 3.6780459093584463, "grad_norm": 0.3360130488872528, "learning_rate": 1.763407916802809e-06, "loss": 0.3225, "step": 783 },
{ "epoch": 3.6827545615067687, "grad_norm": 0.3464799225330353, "learning_rate": 1.7571973866795817e-06, "loss": 0.3228, "step": 784 },
{ "epoch": 3.687463213655091, "grad_norm": 0.3488558232784271, "learning_rate": 1.750991880434015e-06, "loss": 0.3488, "step": 785 },
{ "epoch": 3.692171865803414, "grad_norm": 0.5446019768714905, "learning_rate": 1.7447914400364835e-06, "loss": 0.3491, "step": 786 },
{ "epoch": 3.6968805179517363, "grad_norm": 0.3364548981189728, "learning_rate": 1.7385961074230994e-06, "loss": 0.3192, "step": 787 },
{ "epoch": 3.7015891701000587, "grad_norm": 0.34718620777130127, "learning_rate": 1.7324059244954294e-06, "loss": 0.3368, "step": 788 },
{ "epoch": 3.7062978222483816, "grad_norm": 0.35897183418273926, "learning_rate": 1.7262209331202095e-06, "loss": 0.3414, "step": 789 },
{ "epoch": 3.711006474396704, "grad_norm": 0.37337997555732727, "learning_rate": 1.7200411751290662e-06, "loss": 0.3426, "step": 790 },
{ "epoch": 3.7157151265450263, "grad_norm": 0.34406691789627075, "learning_rate": 1.7138666923182274e-06, "loss": 0.319, "step": 791 },
{ "epoch": 3.720423778693349, "grad_norm": 0.36620885133743286, "learning_rate": 1.7076975264482434e-06, "loss": 0.3587, "step": 792 },
{ "epoch": 3.7251324308416716, "grad_norm": 0.351634681224823, "learning_rate": 1.7015337192437053e-06, "loss": 0.3332, "step": 793 },
{ "epoch": 3.729841082989994, "grad_norm": 0.386874794960022, "learning_rate": 1.6953753123929597e-06, "loss": 0.3433, "step": 794 },
{ "epoch": 3.734549735138317, "grad_norm": 0.34270480275154114, "learning_rate": 1.6892223475478293e-06, "loss": 0.3339, "step": 795 },
{ "epoch": 3.739258387286639, "grad_norm": 0.3725406229496002, "learning_rate": 1.6830748663233306e-06, "loss": 0.3551, "step": 796 },
{ "epoch": 3.7439670394349616, "grad_norm": 0.34803131222724915, "learning_rate": 1.6769329102973913e-06, "loss": 0.3187, "step": 797 },
{ "epoch": 3.7486756915832844, "grad_norm": 0.3435935080051422, "learning_rate": 1.670796521010569e-06, "loss": 0.3218, "step": 798 },
{ "epoch": 3.753384343731607, "grad_norm": 0.3839336931705475, "learning_rate": 1.6646657399657745e-06, "loss": 0.3581, "step": 799 },
{ "epoch": 3.758092995879929, "grad_norm": 0.37887904047966003, "learning_rate": 1.6585406086279847e-06, "loss": 0.3501, "step": 800 },
{ "epoch": 3.762801648028252, "grad_norm": 0.3566491901874542, "learning_rate": 1.652421168423966e-06, "loss": 0.3467, "step": 801 },
{ "epoch": 3.7675103001765744, "grad_norm": 0.37119993567466736, "learning_rate": 1.6463074607419943e-06, "loss": 0.3442, "step": 802 },
{ "epoch": 3.772218952324897, "grad_norm": 0.34793585538864136, "learning_rate": 1.6401995269315712e-06, "loss": 0.3411, "step": 803 },
{ "epoch": 3.7769276044732196, "grad_norm": 0.3532625734806061, "learning_rate": 1.6340974083031525e-06, "loss": 0.3417, "step": 804 },
{ "epoch": 3.781636256621542, "grad_norm": 0.36595654487609863, "learning_rate": 1.6280011461278586e-06, "loss": 0.3621, "step": 805 },
{ "epoch": 3.7863449087698644, "grad_norm": 0.365357905626297, "learning_rate": 1.6219107816372027e-06, "loss": 0.3328, "step": 806 },
{ "epoch": 3.7910535609181872, "grad_norm": 0.33471792936325073, "learning_rate": 1.6158263560228089e-06, "loss": 0.3258, "step": 807 },
{ "epoch": 3.7957622130665096, "grad_norm": 0.35250982642173767, "learning_rate": 1.6097479104361328e-06, "loss": 0.3396, "step": 808 },
{ "epoch": 3.800470865214832, "grad_norm": 0.3582373261451721, "learning_rate": 1.6036754859881888e-06, "loss": 0.3208, "step": 809 },
{ "epoch": 3.805179517363155, "grad_norm": 0.35405614972114563, "learning_rate": 1.5976091237492637e-06, "loss": 0.3355, "step": 810 },
{ "epoch": 3.8098881695114772, "grad_norm": 0.3517574071884155, "learning_rate": 1.5915488647486453e-06, "loss": 0.3364, "step": 811 },
{ "epoch": 3.8145968216597996, "grad_norm": 0.3884425163269043, "learning_rate": 1.5854947499743414e-06, "loss": 0.3548, "step": 812 },
{ "epoch": 3.8193054738081225, "grad_norm": 0.3678779900074005, "learning_rate": 1.5794468203728053e-06, "loss": 0.3499, "step": 813 },
{ "epoch": 3.824014125956445, "grad_norm": 0.3617662489414215, "learning_rate": 1.5734051168486565e-06, "loss": 0.3305, "step": 814 },
{ "epoch": 3.8287227781047672, "grad_norm": 0.33246317505836487, "learning_rate": 1.5673696802644053e-06, "loss": 0.3065, "step": 815 },
{ "epoch": 3.83343143025309, "grad_norm": 0.3756829798221588, "learning_rate": 1.5613405514401757e-06, "loss": 0.351, "step": 816 },
{ "epoch": 3.8381400824014125, "grad_norm": 0.3410739004611969, "learning_rate": 1.5553177711534296e-06, "loss": 0.32, "step": 817 },
{ "epoch": 3.8428487345497353, "grad_norm": 0.3635987937450409, "learning_rate": 1.5493013801386924e-06, "loss": 0.3337, "step": 818 },
{ "epoch": 3.8475573866980577, "grad_norm": 0.3543144166469574, "learning_rate": 1.5432914190872757e-06, "loss": 0.3409, "step": 819 },
{ "epoch": 3.85226603884638, "grad_norm": 0.3444572687149048, "learning_rate": 1.5372879286470022e-06, "loss": 0.3264, "step": 820 },
{ "epoch": 3.856974690994703, "grad_norm": 0.360392689704895, "learning_rate": 1.5312909494219308e-06, "loss": 0.338, "step": 821 },
{ "epoch": 3.8616833431430253, "grad_norm": 0.3705150783061981, "learning_rate": 1.5253005219720822e-06, "loss": 0.3071, "step": 822 },
{ "epoch": 3.8663919952913477, "grad_norm": 0.37416768074035645, "learning_rate": 1.519316686813168e-06, "loss": 0.3564, "step": 823 },
{ "epoch": 3.8711006474396705, "grad_norm": 0.3697960078716278, "learning_rate": 1.5133394844163093e-06, "loss": 0.3426, "step": 824 },
{ "epoch": 3.875809299587993, "grad_norm": 0.3538321554660797, "learning_rate": 1.5073689552077692e-06, "loss": 0.3291, "step": 825 },
{ "epoch": 3.8805179517363158, "grad_norm": 0.3262041211128235, "learning_rate": 1.5014051395686768e-06, "loss": 0.311, "step": 826 },
{ "epoch": 3.885226603884638, "grad_norm": 0.35173299908638, "learning_rate": 1.4954480778347543e-06, "loss": 0.3331, "step": 827 },
{ "epoch": 3.8899352560329605, "grad_norm": 0.36466389894485474, "learning_rate": 1.489497810296046e-06, "loss": 0.3419, "step": 828 },
{ "epoch": 3.8946439081812834, "grad_norm": 0.35103070735931396, "learning_rate": 1.4835543771966422e-06, "loss": 0.3433, "step": 829 },
{ "epoch": 3.8993525603296058, "grad_norm": 0.35364291071891785, "learning_rate": 1.4776178187344107e-06, "loss": 0.3171, "step": 830 },
{ "epoch": 3.904061212477928, "grad_norm": 0.3403826951980591, "learning_rate": 1.4716881750607215e-06, "loss": 0.3275, "step": 831 },
{ "epoch": 3.908769864626251, "grad_norm": 0.38063591718673706, "learning_rate": 1.4657654862801798e-06, "loss": 0.3314, "step": 832 },
{ "epoch": 3.9134785167745734, "grad_norm": 0.3796558976173401, "learning_rate": 1.4598497924503498e-06, "loss": 0.3609, "step": 833 },
{ "epoch": 3.9181871689228958, "grad_norm": 0.33824312686920166, "learning_rate": 1.4539411335814868e-06, "loss": 0.3182, "step": 834 },
{ "epoch": 3.9228958210712186, "grad_norm": 0.3416818082332611, "learning_rate": 1.4480395496362648e-06, "loss": 0.3303, "step": 835 },
{ "epoch": 3.927604473219541, "grad_norm": 0.3429725766181946, "learning_rate": 1.4421450805295084e-06, "loss": 0.3218, "step": 836 },
{ "epoch": 3.9323131253678634, "grad_norm": 0.3443184792995453, "learning_rate": 1.4362577661279225e-06, "loss": 0.3344, "step": 837 },
{ "epoch": 3.937021777516186, "grad_norm": 0.36939215660095215, "learning_rate": 1.4303776462498187e-06, "loss": 0.3574, "step": 838 },
{ "epoch": 3.9417304296645086, "grad_norm": 0.33150216937065125, "learning_rate": 1.4245047606648518e-06, "loss": 0.3155, "step": 839 },
{ "epoch": 3.946439081812831, "grad_norm": 0.3554579019546509, "learning_rate": 1.4186391490937482e-06, "loss": 0.3458, "step": 840 },
{ "epoch": 3.951147733961154, "grad_norm": 0.36509135365486145, "learning_rate": 1.4127808512080331e-06, "loss": 0.333, "step": 841 },
{ "epoch": 3.955856386109476, "grad_norm": 0.3694615960121155, "learning_rate": 1.406929906629774e-06, "loss": 0.3463, "step": 842 },
{ "epoch": 3.9605650382577986, "grad_norm": 0.3466503918170929, "learning_rate": 1.401086354931297e-06, "loss": 0.3342, "step": 843 },
{ "epoch": 3.9652736904061214, "grad_norm": 0.3704316020011902, "learning_rate": 1.3952502356349323e-06, "loss": 0.3585, "step": 844 },
{ "epoch": 3.969982342554444, "grad_norm": 0.36790791153907776, "learning_rate": 1.3894215882127371e-06, "loss": 0.3296, "step": 845 },
{ "epoch": 3.974690994702766, "grad_norm": 0.3574499189853668, "learning_rate": 1.3836004520862404e-06, "loss": 0.3374, "step": 846 },
{ "epoch": 3.979399646851089, "grad_norm": 0.3674068748950958, "learning_rate": 1.3777868666261619e-06, "loss": 0.3401, "step": 847 },
{ "epoch": 3.9841082989994114, "grad_norm": 0.3557773530483246, "learning_rate": 1.3719808711521573e-06, "loss": 0.3377, "step": 848 },
{ "epoch": 3.988816951147734, "grad_norm": 0.4119923412799835, "learning_rate": 1.3661825049325462e-06, "loss": 0.3274, "step": 849 },
{ "epoch": 3.9935256032960567, "grad_norm": 0.35379621386528015, "learning_rate": 1.3603918071840488e-06, "loss": 0.3417, "step": 850 },
{ "epoch": 3.998234255444379, "grad_norm": 0.35988909006118774, "learning_rate": 1.3546088170715222e-06, "loss": 0.3274, "step": 851 },
{ "epoch": 4.0, "grad_norm": 0.35988909006118774, "learning_rate": 1.3488335737076914e-06, "loss": 0.3568, "step": 852 },
{ "epoch": 4.004708652148323, "grad_norm": 0.682213544845581, "learning_rate": 1.343066116152887e-06, "loss": 0.3152, "step": 853 },
{ "epoch": 4.009417304296645, "grad_norm": 0.3923402428627014, "learning_rate": 1.3373064834147819e-06, "loss": 0.3242, "step": 854 },
{ "epoch": 4.014125956444968, "grad_norm": 0.3617793917655945, "learning_rate": 1.3315547144481263e-06, "loss": 0.3177, "step": 855 },
{ "epoch": 4.0188346085932904, "grad_norm": 0.3607487678527832, "learning_rate": 1.3258108481544849e-06, "loss": 0.3238, "step": 856 },
{ "epoch": 4.023543260741612, "grad_norm": 0.37872302532196045, "learning_rate": 1.3200749233819738e-06, "loss": 0.3383, "step": 857 },
{ "epoch": 4.028251912889935, "grad_norm": 0.3622148036956787, "learning_rate": 1.314346978924994e-06, "loss": 0.3033, "step": 858 },
{ "epoch": 4.032960565038258, "grad_norm": 0.3558484613895416, "learning_rate": 1.3086270535239776e-06, "loss": 0.3075, "step": 859 },
{ "epoch": 4.03766921718658, "grad_norm": 0.3856630027294159, "learning_rate": 1.3029151858651145e-06, "loss": 0.3165, "step": 860 },
{ "epoch": 4.042377869334903, "grad_norm": 0.38117724657058716, "learning_rate": 1.2972114145801046e-06, "loss": 0.3155, "step": 861 },
{ "epoch": 4.047086521483226, "grad_norm": 0.38278907537460327, "learning_rate": 1.2915157782458804e-06, "loss": 0.3145, "step": 862 },
{ "epoch": 4.051795173631548, "grad_norm": 0.3633948862552643, "learning_rate": 1.2858283153843608e-06, "loss": 0.2881, "step": 863 },
{ "epoch": 4.0565038257798705, "grad_norm": 0.35489869117736816, "learning_rate": 1.280149064462179e-06, "loss": 0.3093, "step": 864 },
{ "epoch": 4.061212477928193, "grad_norm": 0.3667081296443939, "learning_rate": 1.2744780638904336e-06, "loss": 0.3368, "step": 865 },
{ "epoch": 4.065921130076515, "grad_norm": 0.3660667836666107, "learning_rate": 1.2688153520244162e-06, "loss": 0.3093, "step": 866 },
{ "epoch": 4.070629782224838, "grad_norm": 0.36665669083595276, "learning_rate": 1.2631609671633632e-06, "loss": 0.2986, "step": 867 },
{ "epoch": 4.075338434373161, "grad_norm": 0.35874009132385254, "learning_rate": 1.2575149475501891e-06, "loss": 0.3223, "step": 868 },
{ "epoch": 4.080047086521483, "grad_norm": 0.38370969891548157, "learning_rate": 1.251877331371233e-06, "loss": 0.2954, "step": 869 },
{ "epoch": 4.084755738669806, "grad_norm": 0.3539574146270752, "learning_rate": 1.2462481567559966e-06, "loss": 0.2904, "step": 870 },
{ "epoch": 4.0894643908181285, "grad_norm": 0.36115357279777527, "learning_rate": 1.2406274617768893e-06, "loss": 0.3022, "step": 871 },
{ "epoch": 4.0941730429664505, "grad_norm": 0.3756008446216583, "learning_rate": 1.235015284448969e-06, "loss": 0.3245, "step": 872 },
{ "epoch": 4.098881695114773, "grad_norm": 0.3537841737270355, "learning_rate": 1.2294116627296827e-06, "loss": 0.2973, "step": 873 },
{ "epoch": 4.103590347263096, "grad_norm": 0.3482496738433838, "learning_rate": 1.2238166345186152e-06, "loss": 0.3072, "step": 874 },
{ "epoch": 4.108298999411418, "grad_norm": 0.35395577549934387, "learning_rate": 1.2182302376572295e-06, "loss": 0.3076, "step": 875 },
{ "epoch": 4.113007651559741, "grad_norm": 0.36247384548187256, "learning_rate": 1.212652509928611e-06, "loss": 0.3147, "step": 876 },
{ "epoch": 4.117716303708064, "grad_norm": 0.37728843092918396, "learning_rate": 1.2070834890572105e-06, "loss": 0.3286, "step": 877 },
{ "epoch": 4.122424955856386, "grad_norm": 0.3559524416923523, "learning_rate": 1.2015232127085932e-06, "loss": 0.2873, "step": 878 },
{ "epoch": 4.1271336080047085, "grad_norm": 0.34051647782325745, "learning_rate": 1.1959717184891803e-06, "loss": 0.3166, "step": 879 },
{ "epoch": 4.131842260153031, "grad_norm": 0.3496254086494446, "learning_rate": 1.1904290439459974e-06, "loss": 0.3051, "step": 880 },
{ "epoch": 4.136550912301354, "grad_norm": 0.346172958612442, "learning_rate": 1.1848952265664157e-06, "loss": 0.285, "step": 881 },
{ "epoch": 4.141259564449676, "grad_norm": 0.35215529799461365, "learning_rate": 1.1793703037779056e-06, "loss": 0.2974, "step": 882 },
{ "epoch": 4.145968216597999, "grad_norm": 0.3513922095298767, "learning_rate": 1.173854312947775e-06, "loss": 0.3216, "step": 883 },
{ "epoch": 4.150676868746322, "grad_norm": 0.3996025621891022, "learning_rate": 1.1683472913829286e-06, "loss": 0.308, "step": 884 },
{ "epoch": 4.155385520894644, "grad_norm": 0.3480626940727234, "learning_rate": 1.1628492763296006e-06, "loss": 0.3152, "step": 885 },
{ "epoch": 4.160094173042967, "grad_norm": 0.3710547089576721, "learning_rate": 1.1573603049731156e-06, "loss": 0.3255, "step": 886 },
{ "epoch": 4.164802825191289, "grad_norm": 0.3572455942630768, "learning_rate": 1.15188041443763e-06, "loss": 0.3115, "step": 887 },
{ "epoch": 4.169511477339611, "grad_norm": 0.45938190817832947, "learning_rate": 1.1464096417858821e-06, "loss": 0.3172, "step": 888 },
{ "epoch": 4.174220129487934, "grad_norm": 0.3499940037727356, "learning_rate": 1.1409480240189442e-06, "loss": 0.3136, "step": 889 },
{ "epoch": 4.178928781636257, "grad_norm": 0.35575103759765625, "learning_rate": 1.135495598075969e-06, "loss": 0.2879, "step": 890 },
{ "epoch": 4.183637433784579, "grad_norm": 0.33789294958114624, "learning_rate": 1.130052400833943e-06, "loss": 0.312, "step": 891 },
{ "epoch": 4.188346085932902, "grad_norm": 0.35234612226486206, "learning_rate": 1.1246184691074317e-06, "loss": 0.2925, "step": 892 },
{ "epoch": 4.193054738081225, "grad_norm": 0.34792086482048035, "learning_rate": 1.119193839648337e-06, "loss": 0.3019, "step": 893 },
{ "epoch": 4.197763390229547, "grad_norm": 0.34613871574401855, "learning_rate": 1.1137785491456455e-06, "loss": 0.286, "step": 894 },
{ "epoch": 4.202472042377869, "grad_norm": 0.3518754839897156, "learning_rate": 1.1083726342251802e-06, "loss": 0.3076, "step": 895 },
{ "epoch": 4.207180694526192, "grad_norm": 0.3639785647392273, "learning_rate": 1.102976131449352e-06, "loss": 0.331, "step": 896 },
{ "epoch": 4.211889346674514, "grad_norm": 0.3593629002571106, "learning_rate": 1.0975890773169156e-06, "loss": 0.3312, "step": 897 },
{ "epoch": 4.216597998822837, "grad_norm": 0.37032291293144226, "learning_rate": 1.0922115082627197e-06, "loss": 0.3361, "step": 898 },
{ "epoch": 4.22130665097116, "grad_norm": 0.3786473274230957, "learning_rate": 1.0868434606574621e-06, "loss": 0.3094, "step": 899 },
{ "epoch": 4.226015303119482, "grad_norm": 0.35703983902931213, "learning_rate": 1.0814849708074416e-06, "loss": 0.297, "step": 900 },
{ "epoch": 4.230723955267805, "grad_norm": 0.3440445363521576, "learning_rate": 1.0761360749543158e-06, "loss": 0.2927, "step": 901 },
{ "epoch": 4.2354326074161275, "grad_norm": 0.3606710433959961, "learning_rate": 1.070796809274853e-06, "loss": 0.3129, "step": 902 },
{ "epoch": 4.240141259564449, "grad_norm": 0.35009413957595825, "learning_rate": 1.0654672098806906e-06, "loss": 0.2728, "step": 903 },
{ "epoch": 4.244849911712772, "grad_norm": 0.3345305919647217, "learning_rate": 1.0601473128180855e-06, "loss": 0.2922, "step": 904 },
{ "epoch": 4.249558563861095, "grad_norm": 0.3608264625072479, "learning_rate": 1.0548371540676768e-06, "loss": 0.3196, "step": 905 },
{ "epoch": 4.254267216009417, "grad_norm": 0.48483705520629883, "learning_rate": 1.0495367695442394e-06, "loss": 0.3183, "step": 906 },
{ "epoch": 4.25897586815774, "grad_norm": 0.3715311288833618, "learning_rate": 1.0442461950964373e-06, "loss": 0.3269, "step": 907 },
{ "epoch": 4.263684520306063, "grad_norm": 0.3627394139766693, "learning_rate": 1.038965466506591e-06, "loss": 0.3047, "step": 908 },
{ "epoch": 4.268393172454385, "grad_norm": 0.3572857975959778, "learning_rate": 1.033694619490424e-06, "loss": 0.2977, "step": 909 },
{ "epoch": 4.2731018246027075, "grad_norm": 0.3349437117576599, "learning_rate": 1.0284336896968306e-06, "loss": 0.2997, "step": 910 },
{ "epoch": 4.27781047675103, "grad_norm": 0.3537830412387848, "learning_rate": 1.0231827127076272e-06, "loss": 0.3101, "step": 911 },
{ "epoch": 4.282519128899352, "grad_norm": 0.36906588077545166, "learning_rate": 1.0179417240373181e-06, "loss": 0.3291, "step": 912 },
{ "epoch": 4.287227781047675, "grad_norm": 0.39401233196258545, "learning_rate": 1.0127107591328523e-06, "loss": 0.3171, "step": 913 },
{ "epoch": 4.291936433195998, "grad_norm": 0.36123374104499817, "learning_rate": 1.0074898533733834e-06, "loss": 0.3193, "step": 914 },
{ "epoch": 4.29664508534432, "grad_norm": 0.36208614706993103, "learning_rate": 1.0022790420700298e-06, "loss": 0.2998, "step": 915 },
{ "epoch": 4.301353737492643, "grad_norm": 0.34652724862098694, "learning_rate": 9.970783604656384e-07, "loss": 0.3012, "step": 916 },
{ "epoch": 4.3060623896409655, "grad_norm": 0.3494437038898468, "learning_rate": 9.918878437345445e-07, "loss": 0.3079, "step": 917 },
{ "epoch": 4.3107710417892875, "grad_norm": 0.3824324905872345, "learning_rate": 9.867075269823354e-07, "loss": 0.2922, "step": 918 },
{ "epoch": 4.31547969393761, "grad_norm": 0.3468039035797119, "learning_rate": 9.81537445245608e-07, "loss": 0.3119, "step": 919 },
{ "epoch": 4.320188346085933, "grad_norm": 0.3550470471382141, "learning_rate": 9.763776334917398e-07, "loss": 0.2967, "step": 920 },
{ "epoch": 4.324896998234255, "grad_norm": 0.36313554644584656, "learning_rate": 9.712281266186463e-07, "loss": 0.2942, "step": 921 },
{ "epoch": 4.329605650382578, "grad_norm": 0.3467397093772888, "learning_rate": 9.66088959454547e-07, "loss": 0.311, "step": 922 },
{ "epoch": 4.334314302530901, "grad_norm": 0.335706502199173, "learning_rate": 9.609601667577304e-07, "loss": 0.2938, "step": 923 },
{ "epoch": 4.339022954679223, "grad_norm": 0.3451956510543823, "learning_rate": 9.558417832163164e-07, "loss": 0.3155, "step": 924 },
{ "epoch": 4.3437316068275456, "grad_norm": 0.3699392080307007, "learning_rate": 9.507338434480265e-07, "loss": 0.3255, "step": 925 },
{ "epoch": 4.348440258975868, "grad_norm": 0.364408940076828, "learning_rate": 9.45636381999942e-07, "loss": 0.289, "step": 926 },
{ "epoch": 4.35314891112419, "grad_norm": 0.33332404494285583, "learning_rate": 9.405494333482817e-07, "loss": 0.2991, "step": 927 },
{ "epoch": 4.357857563272513, "grad_norm": 0.36770814657211304, "learning_rate": 9.354730318981561e-07, "loss": 0.3088, "step": 928 },
{ "epoch": 4.362566215420836, "grad_norm": 0.35132384300231934, "learning_rate": 9.304072119833441e-07, "loss": 0.2852, "step": 929 },
{ "epoch": 4.367274867569158, "grad_norm": 0.327019602060318, "learning_rate": 9.253520078660541e-07, "loss": 0.2888, "step": 930 },
{ "epoch": 4.371983519717481, "grad_norm": 0.3622664511203766, "learning_rate": 9.203074537367007e-07, "loss": 0.3145, "step": 931 },
{ "epoch": 4.376692171865804, "grad_norm": 0.37222006916999817, "learning_rate": 9.152735837136631e-07, "loss": 0.3165, "step": 932 },
{ "epoch": 4.381400824014126, "grad_norm": 0.35063692927360535, "learning_rate": 9.102504318430628e-07, "loss": 0.3102, "step": 933 },
{ "epoch": 4.386109476162448, "grad_norm": 0.3508763909339905, "learning_rate": 9.052380320985274e-07, "loss": 0.3056, "step": 934 },
{ "epoch": 4.390818128310771, "grad_norm": 0.3588324785232544, "learning_rate": 9.002364183809658e-07, "loss": 0.3055, "step": 935 },
{ "epoch": 4.395526780459093, "grad_norm": 0.34880831837654114, "learning_rate": 8.952456245183361e-07, "loss": 0.3052, "step": 936 },
{ "epoch": 4.400235432607416, "grad_norm": 0.35600703954696655, "learning_rate": 8.902656842654164e-07, "loss": 0.2837, "step": 937 },
{ "epoch": 4.404944084755739, "grad_norm": 0.35241153836250305, "learning_rate": 8.85296631303579e-07, "loss": 0.3087, "step": 938 },
{ "epoch": 4.409652736904061, "grad_norm": 0.36634692549705505, "learning_rate": 8.803384992405584e-07, "loss": 0.3088, "step": 939 },
{ "epoch": 4.414361389052384, "grad_norm": 0.3660341501235962, "learning_rate": 8.753913216102286e-07, "loss": 0.3025, "step": 940 },
{ "epoch": 4.4190700412007065, "grad_norm": 0.3535331189632416, "learning_rate": 8.704551318723744e-07, "loss": 0.2905, "step": 941 },
{ "epoch": 4.423778693349028, "grad_norm": 0.3493208587169647, "learning_rate": 8.655299634124648e-07, "loss": 0.3032, "step": 942 },
{ "epoch": 4.428487345497351, "grad_norm": 0.35454756021499634, "learning_rate": 8.606158495414258e-07, "loss": 0.304, "step": 943 },
{ "epoch": 4.433195997645674, "grad_norm": 0.3448980450630188, "learning_rate": 8.55712823495419e-07, "loss": 0.3018, "step": 944 },
{ "epoch": 4.437904649793996, "grad_norm": 0.3334943950176239, "learning_rate": 8.508209184356114e-07, "loss": 0.2951, "step": 945 },
{ "epoch": 4.442613301942319, "grad_norm": 0.36533334851264954, "learning_rate": 8.459401674479594e-07, "loss": 0.3263, "step": 946 },
{ "epoch": 4.447321954090642, "grad_norm": 0.36448344588279724, "learning_rate": 8.410706035429747e-07, "loss": 0.287, "step": 947 },
{ "epoch": 4.452030606238964, "grad_norm": 0.33500760793685913, "learning_rate": 8.362122596555089e-07, "loss": 0.3082, "step": 948 },
{ "epoch": 4.4567392583872865, "grad_norm": 0.3625440299510956, "learning_rate": 8.313651686445256e-07, "loss": 0.2886, "step": 949 },
{ "epoch": 4.461447910535609, "grad_norm": 0.3715088367462158, "learning_rate": 8.265293632928856e-07, "loss": 0.3394, "step": 950 },
{ "epoch": 4.466156562683932, "grad_norm": 0.3713008761405945, "learning_rate": 8.217048763071139e-07, "loss": 0.3004, "step": 951 },
{ "epoch": 4.470865214832254, "grad_norm": 0.3510104715824127, "learning_rate": 8.168917403171891e-07, "loss": 0.3035, "step": 952 },
{ "epoch": 4.475573866980577, "grad_norm": 0.3302716612815857, "learning_rate": 8.120899878763181e-07, "loss": 0.2726, "step": 953 },
{ "epoch": 4.4802825191289, "grad_norm": 0.33889657258987427, "learning_rate": 8.072996514607126e-07, "loss": 0.2998, "step": 954 },
{ "epoch": 4.484991171277222, "grad_norm": 0.33504074811935425, "learning_rate": 8.025207634693771e-07, "loss": 0.2969, "step": 955 },
{ "epoch": 4.4896998234255445, "grad_norm": 0.3616447448730469, "learning_rate": 7.97753356223884e-07, "loss": 0.3177, "step": 956 },
{ "epoch": 4.494408475573867, "grad_norm": 0.36567652225494385, "learning_rate": 7.92997461968158e-07, "loss": 0.3116, "step": 957 },
{ "epoch": 4.499117127722189, "grad_norm": 0.38666656613349915, "learning_rate": 7.882531128682539e-07, "loss": 0.3331, "step": 958 },
{ "epoch": 4.503825779870512, "grad_norm": 0.3525826334953308, "learning_rate": 7.835203410121444e-07, "loss": 0.2967, "step": 959 },
{ "epoch": 4.508534432018835, "grad_norm": 0.3484201431274414, "learning_rate": 7.787991784095e-07, "loss": 0.3125, "step": 960 },
{ "epoch": 4.513243084167157, "grad_norm": 0.3635729253292084, "learning_rate": 7.740896569914741e-07, "loss": 0.3158, "step": 961 },
{ "epoch": 4.51795173631548, "grad_norm": 0.3555293679237366, "learning_rate": 7.693918086104826e-07, "loss": 0.2998, "step": 962 },
{ "epoch": 4.522660388463803, "grad_norm": 0.367501437664032, "learning_rate": 7.647056650399951e-07, "loss": 0.2916, "step": 963 },
{ "epoch": 4.5273690406121245, "grad_norm": 0.3521662652492523, "learning_rate": 7.600312579743161e-07, "loss": 0.3251, "step": 964 },
{ "epoch": 4.532077692760447, "grad_norm": 0.5477653741836548, "learning_rate": 7.553686190283716e-07, "loss": 0.2994, "step": 965 },
{ "epoch": 4.53678634490877, "grad_norm": 0.3754388391971588, "learning_rate": 7.507177797374929e-07, "loss": 0.3392, "step": 966 },
{ "epoch": 4.541494997057092, "grad_norm": 0.3783737123012543, "learning_rate": 7.460787715572085e-07, "loss": 0.3113, "step": 967 },
{ "epoch": 4.546203649205415, "grad_norm": 0.35153132677078247, "learning_rate": 7.414516258630245e-07, "loss": 0.296, "step": 968 },
{ "epoch": 4.550912301353738, "grad_norm": 0.34727081656455994, "learning_rate": 7.368363739502213e-07, "loss": 0.3004, "step": 969 },
{ "epoch": 4.55562095350206, "grad_norm": 0.3494728207588196, "learning_rate": 7.322330470336314e-07, "loss": 0.2846, "step": 970 },
{ "epoch": 4.560329605650383, "grad_norm": 0.346566766500473, "learning_rate": 7.276416762474373e-07, "loss": 0.3206, "step": 971 },
{ "epoch": 4.565038257798705, "grad_norm": 0.34576454758644104, "learning_rate": 7.230622926449565e-07, "loss": 0.3072, "step": 972 },
{ "epoch": 4.569746909947027, "grad_norm": 0.35648053884506226, "learning_rate": 7.184949271984299e-07, "loss": 0.3186, "step": 973 },
{ "epoch": 4.57445556209535, "grad_norm": 0.36599478125572205, "learning_rate": 7.139396107988195e-07, "loss": 0.3108, "step": 974 },
{ "epoch": 4.579164214243673, "grad_norm": 0.3868202567100525, "learning_rate": 7.093963742555899e-07, "loss": 0.3204, "step": 975 },
{ "epoch": 4.583872866391995, "grad_norm": 0.3510901629924774, "learning_rate": 7.04865248296508e-07, "loss": 0.3177, "step": 976 },
{ "epoch": 4.588581518540318, "grad_norm": 0.37985342741012573, "learning_rate": 7.003462635674291e-07, "loss": 0.3227, "step": 977 },
{ "epoch": 4.593290170688641, "grad_norm": 0.3746037781238556, "learning_rate": 6.958394506320948e-07, "loss": 0.324, "step": 978 },
{ "epoch": 4.597998822836963, "grad_norm": 0.4108274579048157, "learning_rate": 6.913448399719225e-07, "loss": 0.2989, "step": 979 },
{ "epoch": 4.602707474985285, "grad_norm": 0.34104016423225403, "learning_rate": 6.868624619858022e-07, "loss": 0.2942, "step": 980 },
{ "epoch": 4.607416127133608, "grad_norm": 0.3577067255973816, "learning_rate": 6.823923469898863e-07, "loss": 0.3104, "step": 981 },
{ "epoch": 4.61212477928193, "grad_norm": 0.3417036235332489, "learning_rate": 6.779345252173908e-07, "loss": 0.3032, "step": 982 },
{ "epoch": 4.616833431430253, "grad_norm": 0.3480812609195709, "learning_rate": 6.734890268183855e-07, "loss": 0.3072, "step": 983 },
{ "epoch": 4.621542083578576, "grad_norm": 0.3750641644001007, "learning_rate": 6.690558818595944e-07, "loss": 0.317, "step": 984 },
{ "epoch": 4.626250735726898, "grad_norm": 0.36166512966156006, "learning_rate": 6.646351203241869e-07, "loss": 0.3044, "step": 985 },
{ "epoch": 4.630959387875221, "grad_norm": 0.3501746356487274, "learning_rate": 6.602267721115807e-07, "loss": 0.3005, "step": 986 },
{ "epoch": 4.6356680400235435, "grad_norm": 0.39372000098228455, "learning_rate": 6.558308670372368e-07, "loss": 0.3077, "step": 987 },
{ "epoch": 4.640376692171865, "grad_norm": 0.3307865262031555, "learning_rate": 6.514474348324582e-07, "loss": 0.2942, "step": 988 },
{ "epoch": 4.645085344320188, "grad_norm": 0.36881086230278015, "learning_rate": 6.470765051441874e-07, "loss": 0.3006, "step": 989 },
{ "epoch": 4.649793996468511, "grad_norm": 0.34105730056762695, "learning_rate": 6.427181075348085e-07, "loss": 0.2903, "step": 990 },
{ "epoch": 4.654502648616833, "grad_norm": 0.32339656352996826, "learning_rate": 6.383722714819468e-07, "loss": 0.2845, "step": 991 },
{ "epoch": 4.659211300765156, "grad_norm": 0.3328464925289154, "learning_rate": 6.340390263782656e-07, "loss": 0.293, "step": 992 },
{ "epoch": 4.663919952913479, "grad_norm": 0.3342823088169098, "learning_rate": 6.297184015312754e-07, "loss": 0.3026, "step": 993 },
{ "epoch": 4.6686286050618016, "grad_norm": 0.3467091917991638, "learning_rate": 6.254104261631255e-07, "loss": 0.3055, "step": 994 },
{ "epoch": 4.6733372572101235, "grad_norm": 0.3539992868900299, "learning_rate": 6.21115129410416e-07, "loss": 0.312, "step": 995 },
{ "epoch": 4.678045909358446, "grad_norm": 0.3500078320503235, "learning_rate": 6.168325403239913e-07, "loss": 0.2952, "step": 996 },
{ "epoch": 4.682754561506769, "grad_norm": 0.3606589138507843, "learning_rate": 6.125626878687555e-07, "loss": 0.3298, "step": 997 },
{ "epoch": 4.687463213655091, "grad_norm": 0.3727583587169647, "learning_rate": 6.083056009234631e-07, "loss": 0.3092, "step": 998 },
{ "epoch": 4.692171865803414, "grad_norm": 0.32863423228263855, "learning_rate": 6.040613082805347e-07, "loss": 0.2909, "step": 999 },
{ "epoch": 4.696880517951737, "grad_norm": 0.3464837670326233, "learning_rate": 5.998298386458546e-07, "loss": 0.3022, "step": 1000 },
{ "epoch": 4.701589170100059, "grad_norm": 0.3487091064453125, "learning_rate": 5.95611220638582e-07, "loss": 0.3026, "step": 1001 },
{ "epoch": 4.706297822248382, "grad_norm": 0.33962616324424744, "learning_rate": 5.914054827909549e-07, "loss": 0.2786, "step": 1002 },
{ "epoch": 4.711006474396704, "grad_norm": 0.3359411060810089, "learning_rate": 5.872126535480977e-07, "loss": 0.3236, "step": 1003 },
{ "epoch": 4.715715126545026, "grad_norm": 0.3856370449066162, "learning_rate": 5.830327612678266e-07, "loss": 0.3197, "step": 1004 },
{ "epoch": 4.720423778693349, "grad_norm": 0.36933499574661255, "learning_rate": 5.788658342204628e-07, "loss": 0.3065, "step": 1005 },
{ "epoch": 4.725132430841672, "grad_norm": 0.3596036732196808, "learning_rate": 5.747119005886362e-07, "loss": 0.3224, "step": 1006 },
{ "epoch": 4.729841082989994, "grad_norm": 0.3620109558105469, "learning_rate": 5.705709884670982e-07, "loss": 0.315, "step": 1007 },
{ "epoch": 4.734549735138317, "grad_norm": 0.3684552311897278, "learning_rate": 5.664431258625305e-07, "loss": 0.3108, "step": 1008 },
{ "epoch": 4.73925838728664, "grad_norm": 0.3661479949951172, "learning_rate": 5.623283406933535e-07, "loss": 0.3171, "step": 1009 },
{ "epoch": 4.743967039434962, "grad_norm": 0.3574175238609314, "learning_rate": 5.582266607895423e-07, "loss": 0.3035, "step": 1010 },
{ "epoch": 4.748675691583284, "grad_norm": 0.3499114513397217, "learning_rate": 5.541381138924326e-07, "loss": 0.306, "step": 1011 },
{ "epoch": 4.753384343731607, "grad_norm": 0.32387298345565796, "learning_rate": 5.500627276545406e-07, "loss": 0.2865, "step": 1012 },
{ "epoch": 4.758092995879929, "grad_norm": 0.3871992230415344, "learning_rate": 5.460005296393672e-07, "loss": 0.3512, "step": 1013 },
{ "epoch": 4.762801648028252, "grad_norm": 0.3580347001552582, "learning_rate": 5.419515473212192e-07, "loss": 0.3052, "step": 1014 },
{ "epoch": 4.767510300176575, "grad_norm": 0.34808874130249023, "learning_rate": 5.379158080850164e-07, "loss": 0.309, "step": 1015 },
{ "epoch": 4.772218952324897, "grad_norm": 0.35262778401374817, "learning_rate": 5.338933392261158e-07, "loss": 0.2998, "step": 1016 },
{ "epoch": 4.77692760447322, "grad_norm": 0.36279359459877014, "learning_rate": 5.298841679501163e-07, "loss": 0.3137, "step": 1017 },
{ "epoch": 4.7816362566215425, "grad_norm": 0.36099672317504883, "learning_rate": 5.258883213726829e-07, "loss": 0.3212, "step": 1018 },
{ "epoch": 4.786344908769864, "grad_norm": 0.37556588649749756, "learning_rate": 5.219058265193577e-07, "loss": 0.3251, "step": 1019 },
{ "epoch": 4.791053560918187, "grad_norm": 0.3478699028491974, "learning_rate": 5.179367103253821e-07, "loss": 0.3224, "step": 1020 },
{ "epoch": 4.79576221306651, "grad_norm": 0.35103946924209595, "learning_rate": 5.139809996355111e-07, "loss": 0.2948, "step": 1021 },
{ "epoch": 4.800470865214832, "grad_norm": 0.33818110823631287, "learning_rate": 5.100387212038324e-07, "loss": 0.309, "step": 1022 },
{ "epoch": 4.805179517363155, "grad_norm": 0.371110737323761, "learning_rate": 5.061099016935872e-07, "loss": 0.3232, "step": 1023 },
{ "epoch": 4.809888169511478, "grad_norm": 0.3651929497718811, "learning_rate": 5.02194567676986e-07, "loss": 0.2903, "step": 1024 },
{ "epoch": 4.8145968216598, "grad_norm": 0.4997953772544861, "learning_rate": 4.982927456350339e-07, "loss": 0.309, "step": 1025 },
{ "epoch": 4.8193054738081225, "grad_norm": 0.33667755126953125, "learning_rate": 4.944044619573482e-07, "loss": 0.2909, "step": 1026 },
{ "epoch": 4.824014125956445, "grad_norm": 0.34498387575149536, "learning_rate": 4.905297429419809e-07, "loss": 0.2959, "step": 1027 },
{ "epoch": 4.828722778104767, "grad_norm": 0.3375857174396515, "learning_rate": 4.866686147952388e-07, "loss": 0.2796, "step": 1028 },
{ "epoch": 4.83343143025309, "grad_norm": 0.3379746079444885, "learning_rate": 4.828211036315109e-07, "loss": 0.2916, "step": 1029 },
{ "epoch": 4.838140082401413, "grad_norm": 0.35453835129737854, "learning_rate": 4.789872354730873e-07, "loss": 0.3017, "step": 1030 },
{ "epoch": 4.842848734549735, "grad_norm": 0.3590046465396881, "learning_rate": 4.7516703624998577e-07, "loss": 0.3063, "step": 1031 },
{ "epoch": 4.847557386698058, "grad_norm": 0.3402198851108551, "learning_rate": 4.713605317997741e-07, "loss": 0.3033, "step": 1032 },
{ "epoch": 4.8522660388463805, "grad_norm": 0.37940713763237, "learning_rate": 4.675677478673987e-07, "loss": 0.2982, "step": 1033 },
{ "epoch": 4.8569746909947025, "grad_norm": 0.34546539187431335, "learning_rate": 4.637887101050054e-07, "loss": 0.3008, "step": 1034 },
{ "epoch": 4.861683343143025, "grad_norm": 0.36888861656188965, "learning_rate": 4.6002344407177377e-07, "loss": 0.325, "step": 1035 },
{ "epoch": 4.866391995291348, "grad_norm": 0.35262590646743774, "learning_rate": 4.5627197523373494e-07, "loss": 0.2916, "step": 1036 },
{ "epoch": 4.87110064743967, "grad_norm": 0.35279375314712524, "learning_rate": 4.5253432896360667e-07, "loss": 0.3095, "step": 1037 },
{ "epoch": 4.875809299587993, "grad_norm": 0.36938080191612244, "learning_rate": 4.4881053054061875e-07, "loss": 0.3157, "step": 1038 },
{ "epoch": 4.880517951736316, "grad_norm": 0.5250746607780457, "learning_rate": 4.451006051503406e-07, "loss": 0.3154, "step": 1039 },
{ "epoch": 4.885226603884638, "grad_norm": 0.37330305576324463, "learning_rate": 4.414045778845144e-07, "loss": 0.3022, "step": 1040 },
{ "epoch": 4.8899352560329605, "grad_norm": 0.337289422750473, "learning_rate": 4.3772247374088363e-07, "loss": 0.2992, "step": 1041 },
{ "epoch": 4.894643908181283, "grad_norm": 0.3455512225627899, "learning_rate": 4.340543176230233e-07, "loss": 0.2925, "step": 1042 },
{ "epoch": 4.899352560329605, "grad_norm": 0.37175658345222473, "learning_rate": 4.3040013434017203e-07, "loss": 0.3187, "step": 1043 },
{ "epoch": 4.904061212477928, "grad_norm": 0.3691946268081665, "learning_rate": 4.2675994860706477e-07, "loss": 0.2933, "step": 1044 },
{ "epoch": 4.908769864626251, "grad_norm": 0.401774138212204, "learning_rate": 4.2313378504376584e-07, "loss": 0.3189, "step": 1045 },
{ "epoch": 4.913478516774573, "grad_norm": 0.3765166699886322, "learning_rate": 4.1952166817550184e-07, "loss": 0.318, "step": 1046 },
{ "epoch": 4.918187168922896, "grad_norm": 0.3326210677623749, "learning_rate": 4.1592362243249443e-07, "loss": 0.291, "step": 1047 },
{ "epoch": 4.922895821071219, "grad_norm": 0.35546964406967163, "learning_rate": 4.123396721497977e-07, "loss": 0.3273, "step": 1048 },
{ "epoch": 4.9276044732195405, "grad_norm": 0.37602460384368896, "learning_rate": 4.0876984156713264e-07, "loss": 0.325, "step": 1049 },
{ "epoch": 4.932313125367863, "grad_norm": 0.3632354736328125, "learning_rate": 4.0521415482872305e-07, "loss": 0.2885, "step": 1050 },
{ "epoch": 4.937021777516186, "grad_norm": 0.3401520848274231, "learning_rate": 4.0167263598312997e-07, "loss": 0.3023, "step": 1051 },
{ "epoch": 4.941730429664508, "grad_norm": 0.3465152084827423, "learning_rate": 3.981453089830936e-07, "loss": 0.317, "step": 1052 },
{ "epoch": 4.946439081812831, "grad_norm": 0.35135287046432495, "learning_rate": 3.9463219768536814e-07, "loss": 0.3104, "step": 1053 },
{ "epoch": 4.951147733961154, "grad_norm": 0.3682619631290436, "learning_rate": 3.9113332585056177e-07, "loss": 0.3014, "step": 1054 },
{ "epoch": 4.955856386109476, "grad_norm": 0.34490036964416504, "learning_rate": 3.8764871714297324e-07, "loss": 0.3045, "step": 1055 },
{ "epoch": 4.960565038257799, "grad_norm": 0.3499850332736969, "learning_rate": 3.841783951304365e-07, "loss": 0.3049, "step": 1056 },
{ "epoch": 4.965273690406121, "grad_norm": 0.34722211956977844, "learning_rate": 3.8072238328415753e-07, "loss": 0.3027, "step": 1057 },
{ "epoch": 4.969982342554443, "grad_norm": 0.3570912480354309, "learning_rate": 3.7728070497855595e-07, "loss": 0.2905, "step": 1058 },
{ "epoch": 4.974690994702766, "grad_norm": 0.34860408306121826, "learning_rate": 3.738533834911104e-07, "loss": 0.3028, "step": 1059 },
{ "epoch": 4.979399646851089, "grad_norm": 0.362990140914917, "learning_rate": 3.7044044200219566e-07, "loss": 0.3244, "step": 1060 },
{ "epoch": 4.984108298999411, "grad_norm": 0.3437008261680603, "learning_rate": 3.6704190359493075e-07, "loss": 0.298, "step": 1061 },
{ "epoch": 4.988816951147734, "grad_norm": 0.33662015199661255, "learning_rate": 3.6365779125501875e-07, "loss": 0.2919, "step": 1062 },
{ "epoch": 4.993525603296057, "grad_norm": 0.33186161518096924, "learning_rate": 3.602881278705953e-07, "loss": 0.29, "step": 1063 },
{ "epoch": 4.998234255444379, "grad_norm": 0.3635331094264984, "learning_rate": 3.5693293623207084e-07, "loss": 0.2941, "step": 1064 },
{ "epoch": 5.0, "grad_norm": 0.3635331094264984, "learning_rate": 3.5359223903197846e-07, "loss": 0.333, "step": 1065 },
{ "epoch": 5.004708652148323, "grad_norm": 0.6811550259590149, "learning_rate": 3.5026605886481743e-07, "loss": 0.2784, "step": 1066 },
{ "epoch": 5.009417304296645, "grad_norm": 0.3763936460018158, "learning_rate": 3.4695441822690414e-07, "loss": 0.2991, "step": 1067 },
{ "epoch": 5.014125956444968, "grad_norm": 0.3745104968547821, "learning_rate": 3.4365733951621796e-07, "loss": 0.285, "step": 1068 },
{ "epoch": 5.0188346085932904, "grad_norm": 0.3565599322319031, "learning_rate": 3.4037484503225e-07, "loss": 0.3127, "step": 1069 },
{ |
|
"epoch": 5.023543260741612, |
|
"grad_norm": 0.36555394530296326, |
|
"learning_rate": 3.3710695697585115e-07, |
|
"loss": 0.2864, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 5.028251912889935, |
|
"grad_norm": 0.3286389708518982, |
|
"learning_rate": 3.338536974490841e-07, |
|
"loss": 0.2963, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 5.032960565038258, |
|
"grad_norm": 0.36263808608055115, |
|
"learning_rate": 3.306150884550732e-07, |
|
"loss": 0.2767, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 5.03766921718658, |
|
"grad_norm": 0.3278793394565582, |
|
"learning_rate": 3.273911518978545e-07, |
|
"loss": 0.3168, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 5.042377869334903, |
|
"grad_norm": 0.3660132586956024, |
|
"learning_rate": 3.2418190958222877e-07, |
|
"loss": 0.3031, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 5.047086521483226, |
|
"grad_norm": 0.3466164469718933, |
|
"learning_rate": 3.2098738321361287e-07, |
|
"loss": 0.2895, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 5.051795173631548, |
|
"grad_norm": 0.35571199655532837, |
|
"learning_rate": 3.1780759439789507e-07, |
|
"loss": 0.3096, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 5.0565038257798705, |
|
"grad_norm": 0.3681715428829193, |
|
"learning_rate": 3.146425646412851e-07, |
|
"loss": 0.2763, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 5.061212477928193, |
|
"grad_norm": 0.4345608055591583, |
|
"learning_rate": 3.114923153501748e-07, |
|
"loss": 0.2946, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 5.065921130076515, |
|
"grad_norm": 0.34654700756073, |
|
"learning_rate": 3.083568678309862e-07, |
|
"loss": 0.2803, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 5.070629782224838, |
|
"grad_norm": 0.3382911682128906, |
|
"learning_rate": 3.0523624329003327e-07, |
|
"loss": 0.3039, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 5.075338434373161, |
|
"grad_norm": 0.37458857893943787, |
|
"learning_rate": 3.0213046283337375e-07, |
|
"loss": 0.2849, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 5.080047086521483, |
|
"grad_norm": 0.3375467360019684, |
|
"learning_rate": 2.990395474666724e-07, |
|
"loss": 0.279, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 5.084755738669806, |
|
"grad_norm": 0.3381260633468628, |
|
"learning_rate": 2.9596351809505125e-07, |
|
"loss": 0.2917, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 5.0894643908181285, |
|
"grad_norm": 0.33453017473220825, |
|
"learning_rate": 2.9290239552295543e-07, |
|
"loss": 0.2831, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 5.0941730429664505, |
|
"grad_norm": 0.3335488736629486, |
|
"learning_rate": 2.8985620045400676e-07, |
|
"loss": 0.2847, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 5.098881695114773, |
|
"grad_norm": 0.3345257639884949, |
|
"learning_rate": 2.868249534908682e-07, |
|
"loss": 0.2958, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 5.103590347263096, |
|
"grad_norm": 0.3563317060470581, |
|
"learning_rate": 2.8380867513510155e-07, |
|
"loss": 0.2815, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 5.108298999411418, |
|
"grad_norm": 0.34579357504844666, |
|
"learning_rate": 2.8080738578703054e-07, |
|
"loss": 0.294, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 5.113007651559741, |
|
"grad_norm": 0.35512229800224304, |
|
"learning_rate": 2.778211057456018e-07, |
|
"loss": 0.2861, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 5.117716303708064, |
|
"grad_norm": 0.35786810517311096, |
|
"learning_rate": 2.7484985520824655e-07, |
|
"loss": 0.2982, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 5.122424955856386, |
|
"grad_norm": 0.38589707016944885, |
|
"learning_rate": 2.7189365427074757e-07, |
|
"loss": 0.3011, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 5.1271336080047085, |
|
"grad_norm": 0.3488099277019501, |
|
"learning_rate": 2.6895252292709977e-07, |
|
"loss": 0.2959, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 5.131842260153031, |
|
"grad_norm": 0.34902307391166687, |
|
"learning_rate": 2.660264810693772e-07, |
|
"loss": 0.2982, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 5.136550912301354, |
|
"grad_norm": 0.3481535017490387, |
|
"learning_rate": 2.6311554848759524e-07, |
|
"loss": 0.2757, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 5.141259564449676, |
|
"grad_norm": 0.3484564423561096, |
|
"learning_rate": 2.602197448695823e-07, |
|
"loss": 0.3075, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 5.145968216597999, |
|
"grad_norm": 0.3666880130767822, |
|
"learning_rate": 2.573390898008399e-07, |
|
"loss": 0.2895, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 5.150676868746322, |
|
"grad_norm": 0.34689509868621826, |
|
"learning_rate": 2.544736027644176e-07, |
|
"loss": 0.2761, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 5.155385520894644, |
|
"grad_norm": 0.518316388130188, |
|
"learning_rate": 2.516233031407739e-07, |
|
"loss": 0.2793, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 5.160094173042967, |
|
"grad_norm": 0.33427321910858154, |
|
"learning_rate": 2.4878821020765136e-07, |
|
"loss": 0.2726, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 5.164802825191289, |
|
"grad_norm": 0.37131553888320923, |
|
"learning_rate": 2.459683431399404e-07, |
|
"loss": 0.2972, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 5.169511477339611, |
|
"grad_norm": 0.3701353967189789, |
|
"learning_rate": 2.431637210095564e-07, |
|
"loss": 0.2999, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 5.174220129487934, |
|
"grad_norm": 0.3545023500919342, |
|
"learning_rate": 2.403743627853039e-07, |
|
"loss": 0.2899, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 5.178928781636257, |
|
"grad_norm": 0.34868332743644714, |
|
"learning_rate": 2.376002873327532e-07, |
|
"loss": 0.2826, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 5.183637433784579, |
|
"grad_norm": 0.33814677596092224, |
|
"learning_rate": 2.348415134141102e-07, |
|
"loss": 0.2897, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 5.188346085932902, |
|
"grad_norm": 0.36784449219703674, |
|
"learning_rate": 2.3209805968809007e-07, |
|
"loss": 0.3012, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 5.193054738081225, |
|
"grad_norm": 0.38873499631881714, |
|
"learning_rate": 2.293699447097919e-07, |
|
"loss": 0.2915, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 5.197763390229547, |
|
"grad_norm": 0.3387572169303894, |
|
"learning_rate": 2.2665718693057197e-07, |
|
"loss": 0.2875, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 5.202472042377869, |
|
"grad_norm": 0.6812853217124939, |
|
"learning_rate": 2.2395980469792106e-07, |
|
"loss": 0.298, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 5.207180694526192, |
|
"grad_norm": 0.35651031136512756, |
|
"learning_rate": 2.2127781625533612e-07, |
|
"loss": 0.2699, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 5.211889346674514, |
|
"grad_norm": 0.3304518163204193, |
|
"learning_rate": 2.186112397422016e-07, |
|
"loss": 0.3072, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 5.216597998822837, |
|
"grad_norm": 0.35859474539756775, |
|
"learning_rate": 2.159600931936645e-07, |
|
"loss": 0.2944, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 5.22130665097116, |
|
"grad_norm": 0.376880943775177, |
|
"learning_rate": 2.1332439454051278e-07, |
|
"loss": 0.2902, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 5.226015303119482, |
|
"grad_norm": 0.3474944829940796, |
|
"learning_rate": 2.1070416160905299e-07, |
|
"loss": 0.2913, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 5.230723955267805, |
|
"grad_norm": 0.3503904640674591, |
|
"learning_rate": 2.0809941212099143e-07, |
|
"loss": 0.2856, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 5.2354326074161275, |
|
"grad_norm": 0.33939892053604126, |
|
"learning_rate": 2.0551016369331378e-07, |
|
"loss": 0.2986, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 5.240141259564449, |
|
"grad_norm": 0.36083248257637024, |
|
"learning_rate": 2.0293643383816564e-07, |
|
"loss": 0.2937, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 5.244849911712772, |
|
"grad_norm": 0.35307255387306213, |
|
"learning_rate": 2.0037823996273324e-07, |
|
"loss": 0.2882, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 5.249558563861095, |
|
"grad_norm": 0.33921727538108826, |
|
"learning_rate": 1.9783559936912777e-07, |
|
"loss": 0.2891, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 5.254267216009417, |
|
"grad_norm": 0.35980817675590515, |
|
"learning_rate": 1.9530852925426614e-07, |
|
"loss": 0.2823, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 5.25897586815774, |
|
"grad_norm": 0.4645998477935791, |
|
"learning_rate": 1.927970467097573e-07, |
|
"loss": 0.318, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 5.263684520306063, |
|
"grad_norm": 0.37159132957458496, |
|
"learning_rate": 1.9030116872178317e-07, |
|
"loss": 0.2979, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 5.268393172454385, |
|
"grad_norm": 0.3535681962966919, |
|
"learning_rate": 1.878209121709873e-07, |
|
"loss": 0.2854, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 5.2731018246027075, |
|
"grad_norm": 0.36089959740638733, |
|
"learning_rate": 1.853562938323586e-07, |
|
"loss": 0.2858, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 5.27781047675103, |
|
"grad_norm": 0.34678971767425537, |
|
"learning_rate": 1.8290733037511721e-07, |
|
"loss": 0.2914, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 5.282519128899352, |
|
"grad_norm": 0.33591699600219727, |
|
"learning_rate": 1.8047403836260523e-07, |
|
"loss": 0.276, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 5.287227781047675, |
|
"grad_norm": 0.34570711851119995, |
|
"learning_rate": 1.7805643425216984e-07, |
|
"loss": 0.2921, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 5.291936433195998, |
|
"grad_norm": 0.3546205759048462, |
|
"learning_rate": 1.7565453439505664e-07, |
|
"loss": 0.2616, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 5.29664508534432, |
|
"grad_norm": 0.3535788655281067, |
|
"learning_rate": 1.7326835503629542e-07, |
|
"loss": 0.2924, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 5.301353737492643, |
|
"grad_norm": 0.36527425050735474, |
|
"learning_rate": 1.7089791231459235e-07, |
|
"loss": 0.3107, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 5.3060623896409655, |
|
"grad_norm": 0.36861780285835266, |
|
"learning_rate": 1.6854322226222103e-07, |
|
"loss": 0.2903, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 5.3107710417892875, |
|
"grad_norm": 0.381045401096344, |
|
"learning_rate": 1.6620430080491217e-07, |
|
"loss": 0.2782, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 5.31547969393761, |
|
"grad_norm": 0.35286715626716614, |
|
"learning_rate": 1.6388116376174768e-07, |
|
"loss": 0.2961, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 5.320188346085933, |
|
"grad_norm": 0.3706744313240051, |
|
"learning_rate": 1.6157382684505258e-07, |
|
"loss": 0.3087, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 5.324896998234255, |
|
"grad_norm": 0.3698272705078125, |
|
"learning_rate": 1.5928230566028934e-07, |
|
"loss": 0.2909, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 5.329605650382578, |
|
"grad_norm": 0.3476864993572235, |
|
"learning_rate": 1.5700661570595326e-07, |
|
"loss": 0.299, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 5.334314302530901, |
|
"grad_norm": 0.3664033114910126, |
|
"learning_rate": 1.5474677237346468e-07, |
|
"loss": 0.2937, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 5.339022954679223, |
|
"grad_norm": 0.3502042889595032, |
|
"learning_rate": 1.5250279094706778e-07, |
|
"loss": 0.2941, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 5.3437316068275456, |
|
"grad_norm": 0.3417883515357971, |
|
"learning_rate": 1.5027468660372608e-07, |
|
"loss": 0.2731, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 5.348440258975868, |
|
"grad_norm": 0.32379576563835144, |
|
"learning_rate": 1.4806247441302046e-07, |
|
"loss": 0.2948, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 5.35314891112419, |
|
"grad_norm": 0.3409654498100281, |
|
"learning_rate": 1.4586616933704528e-07, |
|
"loss": 0.2758, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 5.357857563272513, |
|
"grad_norm": 0.3374674320220947, |
|
"learning_rate": 1.4368578623030982e-07, |
|
"loss": 0.3127, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 5.362566215420836, |
|
"grad_norm": 0.3632641136646271, |
|
"learning_rate": 1.4152133983963644e-07, |
|
"loss": 0.2971, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 5.367274867569158, |
|
"grad_norm": 0.3576749861240387, |
|
"learning_rate": 1.3937284480405987e-07, |
|
"loss": 0.2947, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 5.371983519717481, |
|
"grad_norm": 0.3590938448905945, |
|
"learning_rate": 1.3724031565473112e-07, |
|
"loss": 0.2895, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 5.376692171865804, |
|
"grad_norm": 0.3656778037548065, |
|
"learning_rate": 1.3512376681481558e-07, |
|
"loss": 0.2841, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 5.381400824014126, |
|
"grad_norm": 0.35744982957839966, |
|
"learning_rate": 1.3302321259939883e-07, |
|
"loss": 0.289, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 5.386109476162448, |
|
"grad_norm": 0.3551943004131317, |
|
"learning_rate": 1.3093866721538617e-07, |
|
"loss": 0.293, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 5.390818128310771, |
|
"grad_norm": 0.3526786267757416, |
|
"learning_rate": 1.2887014476141214e-07, |
|
"loss": 0.3119, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 5.395526780459093, |
|
"grad_norm": 0.37751346826553345, |
|
"learning_rate": 1.268176592277376e-07, |
|
"loss": 0.3053, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 5.400235432607416, |
|
"grad_norm": 0.3662261962890625, |
|
"learning_rate": 1.2478122449616215e-07, |
|
"loss": 0.2832, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 5.404944084755739, |
|
"grad_norm": 0.3833887279033661, |
|
"learning_rate": 1.2276085433992557e-07, |
|
"loss": 0.2971, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 5.409652736904061, |
|
"grad_norm": 0.36077573895454407, |
|
"learning_rate": 1.2075656242361732e-07, |
|
"loss": 0.2592, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 5.414361389052384, |
|
"grad_norm": 0.32590293884277344, |
|
"learning_rate": 1.1876836230308275e-07, |
|
"loss": 0.2997, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 5.4190700412007065, |
|
"grad_norm": 0.3507887125015259, |
|
"learning_rate": 1.1679626742533201e-07, |
|
"loss": 0.2917, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 5.423778693349028, |
|
"grad_norm": 0.35047176480293274, |
|
"learning_rate": 1.1484029112844797e-07, |
|
"loss": 0.3099, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 5.428487345497351, |
|
"grad_norm": 0.40482771396636963, |
|
"learning_rate": 1.1290044664149874e-07, |
|
"loss": 0.2895, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 5.433195997645674, |
|
"grad_norm": 0.3553638160228729, |
|
"learning_rate": 1.1097674708444472e-07, |
|
"loss": 0.2957, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 5.437904649793996, |
|
"grad_norm": 0.355226069688797, |
|
"learning_rate": 1.0906920546805255e-07, |
|
"loss": 0.3006, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 5.442613301942319, |
|
"grad_norm": 0.37443095445632935, |
|
"learning_rate": 1.07177834693806e-07, |
|
"loss": 0.2902, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 5.447321954090642, |
|
"grad_norm": 0.3441007435321808, |
|
"learning_rate": 1.0530264755381826e-07, |
|
"loss": 0.2611, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 5.452030606238964, |
|
"grad_norm": 0.33422932028770447, |
|
"learning_rate": 1.0344365673074674e-07, |
|
"loss": 0.283, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 5.4567392583872865, |
|
"grad_norm": 0.34415027499198914, |
|
"learning_rate": 1.0160087479770514e-07, |
|
"loss": 0.2868, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 5.461447910535609, |
|
"grad_norm": 0.36474141478538513, |
|
"learning_rate": 9.977431421818229e-08, |
|
"loss": 0.3002, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 5.466156562683932, |
|
"grad_norm": 0.4352918565273285, |
|
"learning_rate": 9.796398734595285e-08, |
|
"loss": 0.3022, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 5.470865214832254, |
|
"grad_norm": 0.3702634572982788, |
|
"learning_rate": 9.616990642499796e-08, |
|
"loss": 0.2976, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 5.475573866980577, |
|
"grad_norm": 0.3480238616466522, |
|
"learning_rate": 9.43920835894191e-08, |
|
"loss": 0.2856, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 5.4802825191289, |
|
"grad_norm": 0.378632515668869, |
|
"learning_rate": 9.263053086336044e-08, |
|
"loss": 0.2947, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 5.484991171277222, |
|
"grad_norm": 0.39319896697998047, |
|
"learning_rate": 9.088526016092142e-08, |
|
"loss": 0.2951, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 5.4896998234255445, |
|
"grad_norm": 0.3514487147331238, |
|
"learning_rate": 8.915628328608206e-08, |
|
"loss": 0.2995, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 5.494408475573867, |
|
"grad_norm": 0.38079124689102173, |
|
"learning_rate": 8.744361193261913e-08, |
|
"loss": 0.3163, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 5.499117127722189, |
|
"grad_norm": 0.36984172463417053, |
|
"learning_rate": 8.574725768402903e-08, |
|
"loss": 0.2783, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 5.503825779870512, |
|
"grad_norm": 0.36924082040786743, |
|
"learning_rate": 8.406723201344891e-08, |
|
"loss": 0.2848, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 5.508534432018835, |
|
"grad_norm": 0.3524254262447357, |
|
"learning_rate": 8.240354628357899e-08, |
|
"loss": 0.3126, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 5.513243084167157, |
|
"grad_norm": 0.37437206506729126, |
|
"learning_rate": 8.075621174660626e-08, |
|
"loss": 0.2956, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 5.51795173631548, |
|
"grad_norm": 0.3524854779243469, |
|
"learning_rate": 7.912523954412693e-08, |
|
"loss": 0.2885, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 5.522660388463803, |
|
"grad_norm": 0.41334500908851624, |
|
"learning_rate": 7.751064070707248e-08, |
|
"loss": 0.2838, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 5.5273690406121245, |
|
"grad_norm": 0.33389919996261597, |
|
"learning_rate": 7.591242615563487e-08, |
|
"loss": 0.2923, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 5.532077692760447, |
|
"grad_norm": 0.349431574344635, |
|
"learning_rate": 7.433060669919307e-08, |
|
"loss": 0.2814, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 5.53678634490877, |
|
"grad_norm": 0.34167221188545227, |
|
"learning_rate": 7.27651930362372e-08, |
|
"loss": 0.3036, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 5.541494997057092, |
|
"grad_norm": 0.36053913831710815, |
|
"learning_rate": 7.121619575430061e-08, |
|
"loss": 0.295, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 5.546203649205415, |
|
"grad_norm": 0.34517520666122437, |
|
"learning_rate": 6.968362532988465e-08, |
|
"loss": 0.2877, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 5.550912301353738, |
|
"grad_norm": 0.3514353036880493, |
|
"learning_rate": 6.816749212839008e-08, |
|
"loss": 0.2846, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 5.55562095350206, |
|
"grad_norm": 0.4496132731437683, |
|
"learning_rate": 6.666780640404436e-08, |
|
"loss": 0.2776, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 5.560329605650383, |
|
"grad_norm": 0.3367858827114105, |
|
"learning_rate": 6.51845782998356e-08, |
|
"loss": 0.3124, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 5.565038257798705, |
|
"grad_norm": 0.3680709898471832, |
|
"learning_rate": 6.371781784744124e-08, |
|
"loss": 0.2965, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 5.569746909947027, |
|
"grad_norm": 0.349960595369339, |
|
"learning_rate": 6.226753496716254e-08, |
|
"loss": 0.29, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 5.57445556209535, |
|
"grad_norm": 0.35704490542411804, |
|
"learning_rate": 6.083373946785459e-08, |
|
"loss": 0.2893, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 5.579164214243673, |
|
"grad_norm": 0.36921223998069763, |
|
"learning_rate": 5.941644104686256e-08, |
|
"loss": 0.301, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 5.583872866391995, |
|
"grad_norm": 0.3648601174354553, |
|
"learning_rate": 5.8015649289955014e-08, |
|
"loss": 0.2765, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 5.588581518540318, |
|
"grad_norm": 0.33216673135757446, |
|
"learning_rate": 5.663137367125898e-08, |
|
"loss": 0.2764, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 5.593290170688641, |
|
"grad_norm": 0.34482541680336, |
|
"learning_rate": 5.5263623553196424e-08, |
|
"loss": 0.2959, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 5.597998822836963, |
|
"grad_norm": 0.3605605959892273, |
|
"learning_rate": 5.391240818642007e-08, |
|
"loss": 0.2871, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 5.602707474985285, |
|
"grad_norm": 0.36288997530937195, |
|
"learning_rate": 5.257773670975214e-08, |
|
"loss": 0.3013, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 5.607416127133608, |
|
"grad_norm": 0.37322282791137695, |
|
"learning_rate": 5.12596181501207e-08, |
|
"loss": 0.2878, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 5.61212477928193, |
|
"grad_norm": 0.343702495098114, |
|
"learning_rate": 4.995806142250065e-08, |
|
"loss": 0.2656, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 5.616833431430253, |
|
"grad_norm": 0.3344155550003052, |
|
"learning_rate": 4.867307532985227e-08, |
|
"loss": 0.3142, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 5.621542083578576, |
|
"grad_norm": 0.3713268041610718, |
|
"learning_rate": 4.740466856306164e-08, |
|
"loss": 0.2864, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 5.626250735726898, |
|
"grad_norm": 0.34247687458992004, |
|
"learning_rate": 4.615284970088174e-08, |
|
"loss": 0.2865, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 5.630959387875221, |
|
"grad_norm": 0.38114747405052185, |
|
"learning_rate": 4.491762720987558e-08, |
|
"loss": 0.3027, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 5.6356680400235435, |
|
"grad_norm": 0.3579918146133423, |
|
"learning_rate": 4.369900944435734e-08, |
|
"loss": 0.3007, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 5.640376692171865, |
|
"grad_norm": 0.35347315669059753, |
|
"learning_rate": 4.249700464633744e-08, |
|
"loss": 0.285, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 5.645085344320188, |
|
"grad_norm": 0.35810258984565735, |
|
"learning_rate": 4.1311620945465314e-08, |
|
"loss": 0.287, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 5.649793996468511, |
|
"grad_norm": 0.3517029881477356, |
|
"learning_rate": 4.014286635897535e-08, |
|
"loss": 0.3004, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 5.654502648616833, |
|
"grad_norm": 0.3566516935825348, |
|
"learning_rate": 3.899074879163245e-08, |
|
"loss": 0.2794, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 5.659211300765156, |
|
"grad_norm": 0.4844449758529663, |
|
"learning_rate": 3.785527603567929e-08, |
|
"loss": 0.2724, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 5.663919952913479, |
|
"grad_norm": 0.3332138955593109, |
|
"learning_rate": 3.673645577078111e-08, |
|
"loss": 0.3026, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 5.6686286050618016, |
|
"grad_norm": 0.35666659474372864, |
|
"learning_rate": 3.5634295563977126e-08, |
|
"loss": 0.2889, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 5.6733372572101235, |
|
"grad_norm": 0.43679553270339966, |
|
"learning_rate": 3.4548802869627806e-08, |
|
"loss": 0.3002, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 5.678045909358446, |
|
"grad_norm": 0.354524165391922, |
|
"learning_rate": 3.347998502936267e-08, |
|
"loss": 0.302, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 5.682754561506769, |
|
"grad_norm": 0.3709380328655243, |
|
"learning_rate": 3.242784927203507e-08, |
|
"loss": 0.3057, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 5.687463213655091, |
|
"grad_norm": 0.35389119386672974, |
|
"learning_rate": 3.1392402713668056e-08, |
|
"loss": 0.2975, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 5.692171865803414, |
|
"grad_norm": 0.36993980407714844, |
|
"learning_rate": 3.0373652357410243e-08, |
|
"loss": 0.2738, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 5.696880517951737, |
|
"grad_norm": 0.3319277763366699, |
|
"learning_rate": 2.937160509348641e-08, |
|
"loss": 0.2864, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 5.701589170100059, |
|
"grad_norm": 0.3565705418586731, |
|
"learning_rate": 2.838626769915226e-08, |
|
"loss": 0.2783, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 5.706297822248382, |
|
"grad_norm": 0.34223315119743347, |
|
"learning_rate": 2.741764683864695e-08, |
|
"loss": 0.2773, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 5.711006474396704, |
|
"grad_norm": 0.33122357726097107, |
|
"learning_rate": 2.6465749063149247e-08, |
|
"loss": 0.2791, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 5.715715126545026, |
|
"grad_norm": 0.3491283357143402, |
|
"learning_rate": 2.553058081073312e-08, |
|
"loss": 0.2573, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 5.720423778693349, |
|
"grad_norm": 0.3793807923793793, |
|
"learning_rate": 2.461214840632331e-08, |
|
"loss": 0.3066, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 5.725132430841672, |
|
"grad_norm": 0.3676687777042389, |
|
"learning_rate": 2.3710458061653453e-08, |
|
"loss": 0.2955, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 5.729841082989994, |
|
"grad_norm": 0.3596877455711365, |
|
"learning_rate": 2.2825515875224414e-08, |
|
"loss": 0.3021, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 5.734549735138317, |
|
"grad_norm": 0.3660774528980255, |
|
"learning_rate": 2.1957327832261567e-08, |
|
"loss": 0.2806, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 5.73925838728664, |
|
"grad_norm": 0.3432364761829376, |
|
"learning_rate": 2.110589980467537e-08, |
|
"loss": 0.2792, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 5.743967039434962, |
|
"grad_norm": 0.3409741520881653, |
|
"learning_rate": 2.0271237551021395e-08, |
|
"loss": 0.2828, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 5.748675691583284, |
|
"grad_norm": 0.3487401604652405, |
|
"learning_rate": 1.945334671646232e-08, |
|
"loss": 0.294, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 5.753384343731607, |
|
"grad_norm": 0.3659566640853882, |
|
"learning_rate": 1.865223283272738e-08, |
|
"loss": 0.2897, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 5.758092995879929, |
|
"grad_norm": 0.3651147186756134, |
|
"learning_rate": 1.7867901318077696e-08, |
|
"loss": 0.3139, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 5.762801648028252, |
|
"grad_norm": 0.38366273045539856, |
|
"learning_rate": 1.710035747726768e-08, |
|
"loss": 0.2798, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 5.767510300176575, |
|
"grad_norm": 0.3514769375324249, |
|
"learning_rate": 1.6349606501509796e-08, |
|
"loss": 0.2884, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 5.772218952324897, |
|
"grad_norm": 0.329897940158844, |
|
"learning_rate": 1.561565346843985e-08, |
|
"loss": 0.2697, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 5.77692760447322, |
|
"grad_norm": 0.3363187909126282, |
|
"learning_rate": 1.4898503342082592e-08, |
|
"loss": 0.297, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 5.7816362566215425, |
|
"grad_norm": 0.36480292677879333, |
|
"learning_rate": 1.4198160972816732e-08, |
|
"loss": 0.2828, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 5.786344908769864, |
|
"grad_norm": 0.352155864238739, |
|
"learning_rate": 1.351463109734441e-08, |
|
"loss": 0.2747, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 5.791053560918187, |
|
"grad_norm": 0.3681313693523407, |
|
"learning_rate": 1.2847918338657617e-08, |
|
"loss": 0.2956, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 5.79576221306651, |
|
"grad_norm": 0.3519361913204193, |
|
"learning_rate": 1.2198027206006823e-08, |
|
"loss": 0.2964, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 5.800470865214832, |
|
"grad_norm": 0.3671719431877136, |
|
"learning_rate": 1.1564962094871569e-08, |
|
"loss": 0.2981, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 5.805179517363155, |
|
"grad_norm": 0.36352333426475525, |
|
"learning_rate": 1.0948727286930194e-08, |
|
"loss": 0.2731, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 5.809888169511478, |
|
"grad_norm": 0.3341374695301056, |
|
"learning_rate": 1.0349326950030159e-08, |
|
"loss": 0.293, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 5.8145968216598, |
|
"grad_norm": 0.3493800759315491, |
|
"learning_rate": 9.766765138160828e-09, |
|
"loss": 0.2873, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 5.8193054738081225, |
|
"grad_norm": 0.3361739218235016, |
|
"learning_rate": 9.201045791426555e-09, |
|
"loss": 0.2787, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 5.824014125956445, |
|
"grad_norm": 0.33776283264160156, |
|
"learning_rate": 8.652172736017817e-09, |
|
"loss": 0.3051, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 5.828722778104767, |
|
"grad_norm": 0.35557821393013, |
|
"learning_rate": 8.120149684187618e-09, |
|
"loss": 0.2668, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 5.83343143025309, |
|
"grad_norm": 0.3537602424621582, |
|
"learning_rate": 7.604980234225124e-09, |
|
"loss": 0.3018, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 5.838140082401413, |
|
"grad_norm": 0.3534339964389801, |
|
"learning_rate": 7.106667870432071e-09, |
|
"loss": 0.2764, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 5.842848734549735, |
|
"grad_norm": 0.3374592363834381, |
|
"learning_rate": 6.625215963098896e-09, |
|
"loss": 0.2806, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 5.847557386698058, |
|
"grad_norm": 0.3431640565395355, |
|
"learning_rate": 6.160627768481142e-09, |
|
"loss": 0.3044, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 5.8522660388463805, |
|
"grad_norm": 0.36093828082084656, |
|
"learning_rate": 5.712906428778919e-09, |
|
"loss": 0.2911, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 5.8569746909947025, |
|
"grad_norm": 0.3618963658809662, |
|
"learning_rate": 5.2820549721144254e-09, |
|
"loss": 0.2915, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 5.861683343143025, |
|
"grad_norm": 0.34805533289909363, |
|
"learning_rate": 4.868076312512515e-09, |
|
"loss": 0.2804, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 5.866391995291348, |
|
"grad_norm": 0.344169557094574, |
|
"learning_rate": 4.470973249879607e-09, |
|
"loss": 0.282, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 5.87110064743967, |
|
"grad_norm": 0.3552144169807434, |
|
"learning_rate": 4.090748469986472e-09, |
|
"loss": 0.3176, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 5.875809299587993, |
|
"grad_norm": 0.352480947971344, |
|
"learning_rate": 3.727404544448254e-09, |
|
"loss": 0.2767, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 5.880517951736316, |
|
"grad_norm": 0.3459225594997406, |
|
"learning_rate": 3.3809439307086465e-09, |
|
"loss": 0.2794, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 5.885226603884638, |
|
"grad_norm": 0.3352890908718109, |
|
"learning_rate": 3.051368972022406e-09, |
|
"loss": 0.2913, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 5.8899352560329605, |
|
"grad_norm": 0.3477289378643036, |
|
"learning_rate": 2.7386818974395324e-09, |
|
"loss": 0.3003, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 5.894643908181283, |
|
"grad_norm": 0.36927521228790283, |
|
"learning_rate": 2.4428848217908362e-09, |
|
"loss": 0.3073, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 5.899352560329605, |
|
"grad_norm": 0.37141096591949463, |
|
"learning_rate": 2.1639797456723955e-09, |
|
"loss": 0.2815, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 5.904061212477928, |
|
"grad_norm": 0.34543880820274353, |
|
"learning_rate": 1.9019685554333422e-09, |
|
"loss": 0.2837, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 5.908769864626251, |
|
"grad_norm": 0.4301663339138031, |
|
"learning_rate": 1.6568530231628189e-09, |
|
"loss": 0.2703, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 5.913478516774573, |
|
"grad_norm": 0.3248118758201599, |
|
"learning_rate": 1.4286348066769317e-09, |
|
"loss": 0.2937, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 5.918187168922896, |
|
"grad_norm": 0.35874074697494507, |
|
"learning_rate": 1.2173154495087603e-09, |
|
"loss": 0.2643, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 5.922895821071219, |
|
"grad_norm": 0.3497905135154724, |
|
"learning_rate": 1.0228963808978087e-09, |
|
"loss": 0.2906, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 5.9276044732195405, |
|
"grad_norm": 0.3518345057964325, |
|
"learning_rate": 8.453789157794601e-10, |
|
"loss": 0.3026, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 5.932313125367863, |
|
"grad_norm": 0.38623765110969543, |
|
"learning_rate": 6.847642547769262e-10, |
|
"loss": 0.2787, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 5.937021777516186, |
|
"grad_norm": 0.34783104062080383, |
|
"learning_rate": 5.41053484192644e-10, |
|
"loss": 0.2924, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 5.941730429664508, |
|
"grad_norm": 0.34925082325935364, |
|
"learning_rate": 4.14247576001614e-10, |
|
"loss": 0.2857, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 5.946439081812831, |
|
"grad_norm": 0.5177316665649414, |
|
"learning_rate": 3.043473878436287e-10, |
|
"loss": 0.305, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 5.951147733961154, |
|
"grad_norm": 0.3745754063129425, |
|
"learning_rate": 2.1135366301910932e-10, |
|
"loss": 0.3044, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 5.955856386109476, |
|
"grad_norm": 0.36931711435317993, |
|
"learning_rate": 1.3526703048216683e-10, |
|
"loss": 0.2991, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 5.960565038257799, |
|
"grad_norm": 0.36866995692253113, |
|
"learning_rate": 7.608800483782652e-11, |
|
"loss": 0.3037, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 5.965273690406121, |
|
"grad_norm": 0.36637187004089355, |
|
"learning_rate": 3.381698633814212e-11, |
|
"loss": 0.3177, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 5.969982342554443, |
|
"grad_norm": 0.39666616916656494, |
|
"learning_rate": 8.45426087942025e-12, |
|
"loss": 0.3023, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 5.974690994702766, |
|
"grad_norm": 0.3777483403682709, |
|
"learning_rate": 0.0, |
|
"loss": 0.2891, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 5.974690994702766, |
|
"step": 1272, |
|
"total_flos": 1.2865326835008799e+19, |
|
"train_loss": 0.3775725749866018, |
|
"train_runtime": 249298.3546, |
|
"train_samples_per_second": 5.233, |
|
"train_steps_per_second": 0.005 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1272, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 6, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.2865326835008799e+19, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|