{ |
|
"best_global_step": null, |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9883268482490273, |
|
"eval_steps": 32, |
|
"global_step": 256, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.007782101167315175, |
|
"grad_norm": 13.2358487482735, |
|
"learning_rate": 4e-08, |
|
"loss": 1.3028, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.007782101167315175, |
|
"eval_loss": 1.1581796407699585, |
|
"eval_runtime": 193.4094, |
|
"eval_samples_per_second": 13.939, |
|
"eval_steps_per_second": 0.222, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.01556420233463035, |
|
"grad_norm": 13.517130491540176, |
|
"learning_rate": 8e-08, |
|
"loss": 1.2437, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.023346303501945526, |
|
"grad_norm": 13.028970161750262, |
|
"learning_rate": 1.2000000000000002e-07, |
|
"loss": 1.3071, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0311284046692607, |
|
"grad_norm": 13.538257754320586, |
|
"learning_rate": 1.6e-07, |
|
"loss": 1.3013, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.038910505836575876, |
|
"grad_norm": 13.666771312447045, |
|
"learning_rate": 2e-07, |
|
"loss": 1.2823, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.04669260700389105, |
|
"grad_norm": 12.758203259942219, |
|
"learning_rate": 2.4000000000000003e-07, |
|
"loss": 1.2447, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.054474708171206226, |
|
"grad_norm": 12.521308995567729, |
|
"learning_rate": 2.8e-07, |
|
"loss": 1.261, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0622568093385214, |
|
"grad_norm": 11.561801223157566, |
|
"learning_rate": 3.2e-07, |
|
"loss": 1.2665, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.07003891050583658, |
|
"grad_norm": 11.156739432208562, |
|
"learning_rate": 3.6e-07, |
|
"loss": 1.2488, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.07782101167315175, |
|
"grad_norm": 10.490731019339595, |
|
"learning_rate": 4e-07, |
|
"loss": 1.2695, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.08560311284046693, |
|
"grad_norm": 8.664509193480505, |
|
"learning_rate": 4.3999999999999997e-07, |
|
"loss": 1.2306, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0933852140077821, |
|
"grad_norm": 7.187121902510894, |
|
"learning_rate": 4.800000000000001e-07, |
|
"loss": 1.2614, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.10116731517509728, |
|
"grad_norm": 6.109576574934582, |
|
"learning_rate": 5.2e-07, |
|
"loss": 1.227, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.10894941634241245, |
|
"grad_norm": 5.033716100027243, |
|
"learning_rate": 5.6e-07, |
|
"loss": 1.2676, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.11673151750972763, |
|
"grad_norm": 3.4452961961682815, |
|
"learning_rate": 6e-07, |
|
"loss": 1.177, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.1245136186770428, |
|
"grad_norm": 3.1164741035862455, |
|
"learning_rate": 6.4e-07, |
|
"loss": 1.229, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.13229571984435798, |
|
"grad_norm": 3.1002564736005267, |
|
"learning_rate": 6.8e-07, |
|
"loss": 1.2488, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.14007782101167315, |
|
"grad_norm": 3.1257169665944833, |
|
"learning_rate": 7.2e-07, |
|
"loss": 1.2324, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.14785992217898833, |
|
"grad_norm": 3.0581482597501832, |
|
"learning_rate": 7.599999999999999e-07, |
|
"loss": 1.1873, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.1556420233463035, |
|
"grad_norm": 3.043731725003644, |
|
"learning_rate": 8e-07, |
|
"loss": 1.1759, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.16342412451361868, |
|
"grad_norm": 2.734793910693522, |
|
"learning_rate": 8.4e-07, |
|
"loss": 1.165, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.17120622568093385, |
|
"grad_norm": 2.5471637005098233, |
|
"learning_rate": 8.799999999999999e-07, |
|
"loss": 1.2258, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.17898832684824903, |
|
"grad_norm": 2.133879636511503, |
|
"learning_rate": 9.2e-07, |
|
"loss": 1.1924, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.1867704280155642, |
|
"grad_norm": 1.809481995522435, |
|
"learning_rate": 9.600000000000001e-07, |
|
"loss": 1.1285, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.19455252918287938, |
|
"grad_norm": 1.5258999116809353, |
|
"learning_rate": 1e-06, |
|
"loss": 1.1613, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.20233463035019456, |
|
"grad_norm": 1.363595982806325, |
|
"learning_rate": 9.995654063450673e-07, |
|
"loss": 1.1791, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.21011673151750973, |
|
"grad_norm": 1.4125313988908728, |
|
"learning_rate": 9.9912739965096e-07, |
|
"loss": 1.2031, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.2178988326848249, |
|
"grad_norm": 1.446135093305486, |
|
"learning_rate": 9.986859395532194e-07, |
|
"loss": 1.1514, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.22568093385214008, |
|
"grad_norm": 1.342713959761476, |
|
"learning_rate": 9.98240985048373e-07, |
|
"loss": 1.1741, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.23346303501945526, |
|
"grad_norm": 1.2716097075011323, |
|
"learning_rate": 9.977924944812361e-07, |
|
"loss": 1.1696, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.24124513618677043, |
|
"grad_norm": 1.1283654885451528, |
|
"learning_rate": 9.973404255319148e-07, |
|
"loss": 1.1564, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.2490272373540856, |
|
"grad_norm": 1.0833672902427003, |
|
"learning_rate": 9.968847352024923e-07, |
|
"loss": 1.1924, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.2490272373540856, |
|
"eval_loss": 1.017075777053833, |
|
"eval_runtime": 192.9829, |
|
"eval_samples_per_second": 13.97, |
|
"eval_steps_per_second": 0.223, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.25680933852140075, |
|
"grad_norm": 1.1221714895781139, |
|
"learning_rate": 9.964253798033959e-07, |
|
"loss": 1.1953, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.26459143968871596, |
|
"grad_norm": 1.1667935320099643, |
|
"learning_rate": 9.959623149394347e-07, |
|
"loss": 1.1631, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.2723735408560311, |
|
"grad_norm": 1.1728769110368067, |
|
"learning_rate": 9.954954954954955e-07, |
|
"loss": 1.1979, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.2801556420233463, |
|
"grad_norm": 1.1879928099076524, |
|
"learning_rate": 9.950248756218905e-07, |
|
"loss": 1.2109, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.28793774319066145, |
|
"grad_norm": 1.0793090685595952, |
|
"learning_rate": 9.94550408719346e-07, |
|
"loss": 1.1517, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.29571984435797666, |
|
"grad_norm": 1.2162353553544407, |
|
"learning_rate": 9.940720474236205e-07, |
|
"loss": 1.1667, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.3035019455252918, |
|
"grad_norm": 1.1553136659353096, |
|
"learning_rate": 9.935897435897434e-07, |
|
"loss": 1.1564, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.311284046692607, |
|
"grad_norm": 0.8975925277420491, |
|
"learning_rate": 9.931034482758622e-07, |
|
"loss": 1.1409, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.31906614785992216, |
|
"grad_norm": 0.848010373104077, |
|
"learning_rate": 9.92613111726685e-07, |
|
"loss": 1.1489, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.32684824902723736, |
|
"grad_norm": 0.8445987480588121, |
|
"learning_rate": 9.921186833565135e-07, |
|
"loss": 1.1518, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.3346303501945525, |
|
"grad_norm": 0.8910485581415633, |
|
"learning_rate": 9.916201117318436e-07, |
|
"loss": 1.1663, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.3424124513618677, |
|
"grad_norm": 0.8226662462385251, |
|
"learning_rate": 9.911173445535296e-07, |
|
"loss": 1.1975, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.35019455252918286, |
|
"grad_norm": 1.266452159867259, |
|
"learning_rate": 9.906103286384975e-07, |
|
"loss": 1.19, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.35797665369649806, |
|
"grad_norm": 0.8228834741800612, |
|
"learning_rate": 9.900990099009902e-07, |
|
"loss": 1.1672, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.3657587548638132, |
|
"grad_norm": 0.7967316496651314, |
|
"learning_rate": 9.895833333333333e-07, |
|
"loss": 1.1653, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.3735408560311284, |
|
"grad_norm": 0.8385685679438281, |
|
"learning_rate": 9.8906324298621e-07, |
|
"loss": 1.2161, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.38132295719844356, |
|
"grad_norm": 0.7832731689890803, |
|
"learning_rate": 9.88538681948424e-07, |
|
"loss": 1.1493, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.38910505836575876, |
|
"grad_norm": 0.7470950974115376, |
|
"learning_rate": 9.88009592326139e-07, |
|
"loss": 1.1444, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.3968871595330739, |
|
"grad_norm": 0.9407081004608256, |
|
"learning_rate": 9.874759152215799e-07, |
|
"loss": 1.2262, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.4046692607003891, |
|
"grad_norm": 0.7807613751449256, |
|
"learning_rate": 9.869375907111757e-07, |
|
"loss": 1.1761, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.41245136186770426, |
|
"grad_norm": 0.7814091677207984, |
|
"learning_rate": 9.86394557823129e-07, |
|
"loss": 1.1684, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.42023346303501946, |
|
"grad_norm": 0.7209307303105739, |
|
"learning_rate": 9.858467545143973e-07, |
|
"loss": 1.1429, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.4280155642023346, |
|
"grad_norm": 0.6839759799403674, |
|
"learning_rate": 9.852941176470587e-07, |
|
"loss": 1.1591, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.4357976653696498, |
|
"grad_norm": 0.7399500476567901, |
|
"learning_rate": 9.847365829640571e-07, |
|
"loss": 1.1264, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.44357976653696496, |
|
"grad_norm": 0.7250794425256079, |
|
"learning_rate": 9.841740850642927e-07, |
|
"loss": 1.1764, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.45136186770428016, |
|
"grad_norm": 0.6729705016341, |
|
"learning_rate": 9.836065573770493e-07, |
|
"loss": 1.1557, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.4591439688715953, |
|
"grad_norm": 0.6814295448695143, |
|
"learning_rate": 9.830339321357286e-07, |
|
"loss": 1.1674, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.4669260700389105, |
|
"grad_norm": 0.6657154699765654, |
|
"learning_rate": 9.824561403508773e-07, |
|
"loss": 1.1441, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.47470817120622566, |
|
"grad_norm": 0.6256174713114324, |
|
"learning_rate": 9.818731117824774e-07, |
|
"loss": 1.1797, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.48249027237354086, |
|
"grad_norm": 0.6194904383458136, |
|
"learning_rate": 9.81284774911482e-07, |
|
"loss": 1.1476, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.490272373540856, |
|
"grad_norm": 0.7409013991341397, |
|
"learning_rate": 9.80691056910569e-07, |
|
"loss": 1.1563, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.4980544747081712, |
|
"grad_norm": 0.6529431777087924, |
|
"learning_rate": 9.800918836140888e-07, |
|
"loss": 1.13, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.4980544747081712, |
|
"eval_loss": 0.993022084236145, |
|
"eval_runtime": 193.0792, |
|
"eval_samples_per_second": 13.963, |
|
"eval_steps_per_second": 0.223, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.5058365758754864, |
|
"grad_norm": 0.6249987093009363, |
|
"learning_rate": 9.794871794871796e-07, |
|
"loss": 1.1365, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.5136186770428015, |
|
"grad_norm": 0.5714052179303046, |
|
"learning_rate": 9.788768675940237e-07, |
|
"loss": 1.1486, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.5214007782101168, |
|
"grad_norm": 0.6266253830635832, |
|
"learning_rate": 9.782608695652173e-07, |
|
"loss": 1.1297, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.5291828793774319, |
|
"grad_norm": 0.6413361931814175, |
|
"learning_rate": 9.776391055642225e-07, |
|
"loss": 1.1346, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.5369649805447471, |
|
"grad_norm": 0.6401174657840715, |
|
"learning_rate": 9.770114942528735e-07, |
|
"loss": 1.1443, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.5447470817120622, |
|
"grad_norm": 0.6160976711769636, |
|
"learning_rate": 9.763779527559055e-07, |
|
"loss": 1.138, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.5525291828793775, |
|
"grad_norm": 0.6234000690415658, |
|
"learning_rate": 9.757383966244725e-07, |
|
"loss": 1.1018, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.5603112840466926, |
|
"grad_norm": 0.6528376037799454, |
|
"learning_rate": 9.75092739798622e-07, |
|
"loss": 1.1059, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.5680933852140078, |
|
"grad_norm": 0.5652314830088928, |
|
"learning_rate": 9.744408945686901e-07, |
|
"loss": 1.1222, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.5758754863813229, |
|
"grad_norm": 0.585673991341348, |
|
"learning_rate": 9.737827715355804e-07, |
|
"loss": 1.1067, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.5836575875486382, |
|
"grad_norm": 0.5888031772006102, |
|
"learning_rate": 9.731182795698924e-07, |
|
"loss": 1.1495, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.5914396887159533, |
|
"grad_norm": 0.6220423369607407, |
|
"learning_rate": 9.72447325769854e-07, |
|
"loss": 1.1197, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.5992217898832685, |
|
"grad_norm": 0.6112403683619732, |
|
"learning_rate": 9.71769815418024e-07, |
|
"loss": 1.1382, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.6070038910505836, |
|
"grad_norm": 0.6459405223806135, |
|
"learning_rate": 9.710856519367158e-07, |
|
"loss": 1.1518, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.6147859922178989, |
|
"grad_norm": 0.6209769863467987, |
|
"learning_rate": 9.703947368421054e-07, |
|
"loss": 1.1239, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.622568093385214, |
|
"grad_norm": 0.5843668163519747, |
|
"learning_rate": 9.696969696969695e-07, |
|
"loss": 1.1433, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.6303501945525292, |
|
"grad_norm": 0.5969841579813842, |
|
"learning_rate": 9.689922480620153e-07, |
|
"loss": 1.1312, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.6381322957198443, |
|
"grad_norm": 0.5567475366696957, |
|
"learning_rate": 9.68280467445743e-07, |
|
"loss": 1.0717, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.6459143968871596, |
|
"grad_norm": 0.8841164939701298, |
|
"learning_rate": 9.675615212527965e-07, |
|
"loss": 1.1229, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.6536964980544747, |
|
"grad_norm": 0.6021627871478876, |
|
"learning_rate": 9.668353007307475e-07, |
|
"loss": 1.119, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.6614785992217899, |
|
"grad_norm": 0.561817882301385, |
|
"learning_rate": 9.661016949152542e-07, |
|
"loss": 1.1298, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.669260700389105, |
|
"grad_norm": 0.6382338925455957, |
|
"learning_rate": 9.653605905735377e-07, |
|
"loss": 1.1168, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.6770428015564203, |
|
"grad_norm": 0.602322633909128, |
|
"learning_rate": 9.646118721461186e-07, |
|
"loss": 1.1291, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.6848249027237354, |
|
"grad_norm": 0.5990302538351283, |
|
"learning_rate": 9.63855421686747e-07, |
|
"loss": 1.1332, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.6926070038910506, |
|
"grad_norm": 0.5869777798515858, |
|
"learning_rate": 9.630911188004613e-07, |
|
"loss": 1.1404, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.7003891050583657, |
|
"grad_norm": 0.5933684142522954, |
|
"learning_rate": 9.623188405797102e-07, |
|
"loss": 1.1528, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.708171206225681, |
|
"grad_norm": 0.6118209958752469, |
|
"learning_rate": 9.615384615384615e-07, |
|
"loss": 1.1486, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.7159533073929961, |
|
"grad_norm": 0.626189163793506, |
|
"learning_rate": 9.607498535442295e-07, |
|
"loss": 1.1079, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.7237354085603113, |
|
"grad_norm": 0.5824843670586056, |
|
"learning_rate": 9.599528857479386e-07, |
|
"loss": 1.1471, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.7315175097276264, |
|
"grad_norm": 0.6045894845802466, |
|
"learning_rate": 9.591474245115454e-07, |
|
"loss": 1.0899, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.7392996108949417, |
|
"grad_norm": 0.5453528811988809, |
|
"learning_rate": 9.583333333333334e-07, |
|
"loss": 1.1232, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.7470817120622568, |
|
"grad_norm": 0.6430932607963977, |
|
"learning_rate": 9.57510472770796e-07, |
|
"loss": 1.0917, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.7470817120622568, |
|
"eval_loss": 0.9749420881271362, |
|
"eval_runtime": 194.1545, |
|
"eval_samples_per_second": 13.886, |
|
"eval_steps_per_second": 0.221, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.754863813229572, |
|
"grad_norm": 0.5852779384368416, |
|
"learning_rate": 9.566787003610106e-07, |
|
"loss": 1.1517, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.7626459143968871, |
|
"grad_norm": 0.6409842274490922, |
|
"learning_rate": 9.55837870538415e-07, |
|
"loss": 1.1254, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.7704280155642024, |
|
"grad_norm": 0.5639528294869124, |
|
"learning_rate": 9.549878345498782e-07, |
|
"loss": 1.1121, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.7782101167315175, |
|
"grad_norm": 0.6213244609041039, |
|
"learning_rate": 9.541284403669725e-07, |
|
"loss": 1.0915, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.7859922178988327, |
|
"grad_norm": 0.6551114483360119, |
|
"learning_rate": 9.53259532595326e-07, |
|
"loss": 1.1534, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.7937743190661478, |
|
"grad_norm": 0.6413194021766461, |
|
"learning_rate": 9.523809523809523e-07, |
|
"loss": 1.1432, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.8015564202334631, |
|
"grad_norm": 0.5912246112879433, |
|
"learning_rate": 9.514925373134328e-07, |
|
"loss": 1.1856, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.8093385214007782, |
|
"grad_norm": 0.5929209222521526, |
|
"learning_rate": 9.505941213258286e-07, |
|
"loss": 1.1097, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.8171206225680934, |
|
"grad_norm": 0.5781185505602303, |
|
"learning_rate": 9.496855345911948e-07, |
|
"loss": 1.093, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.8249027237354085, |
|
"grad_norm": 0.5850063235452896, |
|
"learning_rate": 9.487666034155598e-07, |
|
"loss": 1.1053, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.8326848249027238, |
|
"grad_norm": 0.5557542208140502, |
|
"learning_rate": 9.478371501272264e-07, |
|
"loss": 1.1072, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.8404669260700389, |
|
"grad_norm": 0.6939378070209432, |
|
"learning_rate": 9.46896992962252e-07, |
|
"loss": 1.1557, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.8482490272373541, |
|
"grad_norm": 0.5821602482631876, |
|
"learning_rate": 9.459459459459459e-07, |
|
"loss": 1.1008, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.8560311284046692, |
|
"grad_norm": 0.5641222765431608, |
|
"learning_rate": 9.449838187702264e-07, |
|
"loss": 1.1066, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.8638132295719845, |
|
"grad_norm": 0.6073853509255491, |
|
"learning_rate": 9.440104166666666e-07, |
|
"loss": 1.1347, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.8715953307392996, |
|
"grad_norm": 0.6652073708936578, |
|
"learning_rate": 9.430255402750491e-07, |
|
"loss": 1.1539, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.8793774319066148, |
|
"grad_norm": 0.5779065641712691, |
|
"learning_rate": 9.420289855072464e-07, |
|
"loss": 1.1286, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.8871595330739299, |
|
"grad_norm": 1.0893325101395284, |
|
"learning_rate": 9.410205434062292e-07, |
|
"loss": 1.1327, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.8949416342412452, |
|
"grad_norm": 0.5897769203979899, |
|
"learning_rate": 9.399999999999999e-07, |
|
"loss": 1.1083, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.9027237354085603, |
|
"grad_norm": 0.5962317172776513, |
|
"learning_rate": 9.389671361502347e-07, |
|
"loss": 1.0842, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.9105058365758755, |
|
"grad_norm": 0.5955932544091112, |
|
"learning_rate": 9.379217273954116e-07, |
|
"loss": 1.1366, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.9182879377431906, |
|
"grad_norm": 0.5949693376391756, |
|
"learning_rate": 9.368635437881874e-07, |
|
"loss": 1.1094, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.9260700389105059, |
|
"grad_norm": 0.6044514449577904, |
|
"learning_rate": 9.357923497267759e-07, |
|
"loss": 1.1639, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.933852140077821, |
|
"grad_norm": 0.625428671903296, |
|
"learning_rate": 9.347079037800687e-07, |
|
"loss": 1.1054, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.9416342412451362, |
|
"grad_norm": 0.5297520430435139, |
|
"learning_rate": 9.33609958506224e-07, |
|
"loss": 1.1396, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.9494163424124513, |
|
"grad_norm": 0.604395748227856, |
|
"learning_rate": 9.324982602644397e-07, |
|
"loss": 1.1297, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.9571984435797666, |
|
"grad_norm": 0.5935060150814615, |
|
"learning_rate": 9.313725490196079e-07, |
|
"loss": 1.1327, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.9649805447470817, |
|
"grad_norm": 0.6213246371577336, |
|
"learning_rate": 9.30232558139535e-07, |
|
"loss": 1.1655, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.9727626459143969, |
|
"grad_norm": 0.5459987203799985, |
|
"learning_rate": 9.290780141843972e-07, |
|
"loss": 1.1119, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.980544747081712, |
|
"grad_norm": 0.5862633284111678, |
|
"learning_rate": 9.279086366880799e-07, |
|
"loss": 1.1102, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.9883268482490273, |
|
"grad_norm": 0.5749701605073742, |
|
"learning_rate": 9.267241379310344e-07, |
|
"loss": 1.1215, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.9961089494163424, |
|
"grad_norm": 0.5608948491944076, |
|
"learning_rate": 9.25524222704266e-07, |
|
"loss": 1.1171, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.9961089494163424, |
|
"eval_loss": 0.9638092517852783, |
|
"eval_runtime": 193.8483, |
|
"eval_samples_per_second": 13.908, |
|
"eval_steps_per_second": 0.222, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.5608948491944076, |
|
"learning_rate": 9.243085880640464e-07, |
|
"loss": 1.1281, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 1.0077821011673151, |
|
"grad_norm": 0.8826714731817153, |
|
"learning_rate": 9.230769230769229e-07, |
|
"loss": 1.1385, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.0155642023346303, |
|
"grad_norm": 0.585537303127348, |
|
"learning_rate": 9.218289085545723e-07, |
|
"loss": 1.0753, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 1.0233463035019454, |
|
"grad_norm": 0.574845853397158, |
|
"learning_rate": 9.205642167780252e-07, |
|
"loss": 1.1466, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.0311284046692606, |
|
"grad_norm": 0.573244467973897, |
|
"learning_rate": 9.192825112107622e-07, |
|
"loss": 1.1359, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 1.038910505836576, |
|
"grad_norm": 0.6964714993014424, |
|
"learning_rate": 9.179834462001504e-07, |
|
"loss": 1.1122, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.046692607003891, |
|
"grad_norm": 0.641322046313728, |
|
"learning_rate": 9.166666666666665e-07, |
|
"loss": 1.0825, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.0544747081712063, |
|
"grad_norm": 0.5360716115631284, |
|
"learning_rate": 9.153318077803201e-07, |
|
"loss": 1.1006, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.0622568093385214, |
|
"grad_norm": 0.5816374932590664, |
|
"learning_rate": 9.139784946236559e-07, |
|
"loss": 1.1116, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 1.0700389105058365, |
|
"grad_norm": 0.6372126076146449, |
|
"learning_rate": 9.126063418406805e-07, |
|
"loss": 1.0979, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.0778210116731517, |
|
"grad_norm": 0.5578000693621826, |
|
"learning_rate": 9.11214953271028e-07, |
|
"loss": 1.1263, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 1.0856031128404668, |
|
"grad_norm": 0.5923651609485125, |
|
"learning_rate": 9.098039215686274e-07, |
|
"loss": 1.0966, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.0933852140077822, |
|
"grad_norm": 0.533195556847129, |
|
"learning_rate": 9.083728278041073e-07, |
|
"loss": 1.1349, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 1.1011673151750974, |
|
"grad_norm": 0.6329974401580136, |
|
"learning_rate": 9.069212410501193e-07, |
|
"loss": 1.1072, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.1089494163424125, |
|
"grad_norm": 0.5546033043372056, |
|
"learning_rate": 9.054487179487179e-07, |
|
"loss": 1.1565, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 1.1167315175097277, |
|
"grad_norm": 0.5975088088440533, |
|
"learning_rate": 9.03954802259887e-07, |
|
"loss": 1.0724, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.1245136186770428, |
|
"grad_norm": 0.57801276427658, |
|
"learning_rate": 9.024390243902439e-07, |
|
"loss": 1.1346, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.132295719844358, |
|
"grad_norm": 0.5879398504293435, |
|
"learning_rate": 9.009009009009008e-07, |
|
"loss": 1.159, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.140077821011673, |
|
"grad_norm": 0.593628488224272, |
|
"learning_rate": 8.993399339933992e-07, |
|
"loss": 1.1464, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 1.1478599221789882, |
|
"grad_norm": 0.5593315767964653, |
|
"learning_rate": 8.977556109725684e-07, |
|
"loss": 1.1054, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.1556420233463034, |
|
"grad_norm": 0.5787699146029086, |
|
"learning_rate": 8.96147403685092e-07, |
|
"loss": 1.096, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 1.1634241245136188, |
|
"grad_norm": 0.5877657497768165, |
|
"learning_rate": 8.945147679324893e-07, |
|
"loss": 1.0887, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.171206225680934, |
|
"grad_norm": 0.6197879903738175, |
|
"learning_rate": 8.928571428571428e-07, |
|
"loss": 1.1514, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 1.178988326848249, |
|
"grad_norm": 0.5393516857845513, |
|
"learning_rate": 8.911739502999142e-07, |
|
"loss": 1.1215, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.1867704280155642, |
|
"grad_norm": 0.5758698948439026, |
|
"learning_rate": 8.894645941278064e-07, |
|
"loss": 1.0557, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 1.1945525291828794, |
|
"grad_norm": 0.5870780530298951, |
|
"learning_rate": 8.877284595300261e-07, |
|
"loss": 1.0892, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 1.2023346303501945, |
|
"grad_norm": 0.5933780513917563, |
|
"learning_rate": 8.859649122807017e-07, |
|
"loss": 1.1125, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.2101167315175096, |
|
"grad_norm": 0.5615488624761017, |
|
"learning_rate": 8.841732979664014e-07, |
|
"loss": 1.1394, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 1.217898832684825, |
|
"grad_norm": 0.5960612312151845, |
|
"learning_rate": 8.823529411764706e-07, |
|
"loss": 1.0859, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 1.2256809338521402, |
|
"grad_norm": 0.6325808125841859, |
|
"learning_rate": 8.80503144654088e-07, |
|
"loss": 1.1128, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 1.2334630350194553, |
|
"grad_norm": 0.6076576722454798, |
|
"learning_rate": 8.78623188405797e-07, |
|
"loss": 1.1092, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 1.2412451361867705, |
|
"grad_norm": 0.5821560609671981, |
|
"learning_rate": 8.767123287671233e-07, |
|
"loss": 1.0963, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.2412451361867705, |
|
"eval_loss": 0.9557842016220093, |
|
"eval_runtime": 193.7527, |
|
"eval_samples_per_second": 13.915, |
|
"eval_steps_per_second": 0.222, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.2490272373540856, |
|
"grad_norm": 0.5621223931485095, |
|
"learning_rate": 8.747697974217309e-07, |
|
"loss": 1.1338, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 1.2568093385214008, |
|
"grad_norm": 0.5962800493628467, |
|
"learning_rate": 8.727948003714019e-07, |
|
"loss": 1.1388, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 1.264591439688716, |
|
"grad_norm": 0.5814349430158473, |
|
"learning_rate": 8.707865168539326e-07, |
|
"loss": 1.1038, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 1.272373540856031, |
|
"grad_norm": 0.6004347939909851, |
|
"learning_rate": 8.687440982058545e-07, |
|
"loss": 1.143, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 1.2801556420233462, |
|
"grad_norm": 0.5948532065914537, |
|
"learning_rate": 8.666666666666666e-07, |
|
"loss": 1.155, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.2879377431906613, |
|
"grad_norm": 0.5570114843646816, |
|
"learning_rate": 8.645533141210375e-07, |
|
"loss": 1.095, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 1.2957198443579767, |
|
"grad_norm": 0.6592254796476543, |
|
"learning_rate": 8.624031007751938e-07, |
|
"loss": 1.1114, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 1.3035019455252919, |
|
"grad_norm": 0.5375793451912569, |
|
"learning_rate": 8.602150537634409e-07, |
|
"loss": 1.1018, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 1.311284046692607, |
|
"grad_norm": 0.5774524273066066, |
|
"learning_rate": 8.579881656804733e-07, |
|
"loss": 1.0866, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 1.3190661478599222, |
|
"grad_norm": 0.5767675794480738, |
|
"learning_rate": 8.557213930348258e-07, |
|
"loss": 1.0919, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.3268482490272373, |
|
"grad_norm": 0.5424563326577124, |
|
"learning_rate": 8.534136546184737e-07, |
|
"loss": 1.097, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 1.3346303501945525, |
|
"grad_norm": 0.5504324773913235, |
|
"learning_rate": 8.510638297872341e-07, |
|
"loss": 1.1088, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 1.3424124513618678, |
|
"grad_norm": 0.5631592132878254, |
|
"learning_rate": 8.486707566462167e-07, |
|
"loss": 1.141, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.350194552529183, |
|
"grad_norm": 0.5987091201900138, |
|
"learning_rate": 8.462332301341588e-07, |
|
"loss": 1.1352, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.3579766536964981, |
|
"grad_norm": 0.5644686177164949, |
|
"learning_rate": 8.4375e-07, |
|
"loss": 1.1086, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.3657587548638133, |
|
"grad_norm": 0.5603933667416149, |
|
"learning_rate": 8.412197686645635e-07, |
|
"loss": 1.1066, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.3735408560311284, |
|
"grad_norm": 1.1467757879130558, |
|
"learning_rate": 8.386411889596603e-07, |
|
"loss": 1.1587, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.3813229571984436, |
|
"grad_norm": 0.7887321457214312, |
|
"learning_rate": 8.360128617363344e-07, |
|
"loss": 1.088, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.3891050583657587, |
|
"grad_norm": 1.3469817773930703, |
|
"learning_rate": 8.333333333333332e-07, |
|
"loss": 1.0843, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.3968871595330739, |
|
"grad_norm": 0.5467899896948426, |
|
"learning_rate": 8.306010928961747e-07, |
|
"loss": 1.1669, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.404669260700389, |
|
"grad_norm": 0.5788709158633432, |
|
"learning_rate": 8.278145695364237e-07, |
|
"loss": 1.1163, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.4124513618677041, |
|
"grad_norm": 0.5711519564174999, |
|
"learning_rate": 8.249721293199554e-07, |
|
"loss": 1.1085, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.4202334630350195, |
|
"grad_norm": 0.5648595416368181, |
|
"learning_rate": 8.220720720720721e-07, |
|
"loss": 1.0845, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.4280155642023347, |
|
"grad_norm": 0.525420807610935, |
|
"learning_rate": 8.191126279863481e-07, |
|
"loss": 1.1037, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.4357976653696498, |
|
"grad_norm": 0.6990978764976382, |
|
"learning_rate": 8.160919540229884e-07, |
|
"loss": 1.0672, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.443579766536965, |
|
"grad_norm": 0.565722083537725, |
|
"learning_rate": 8.130081300813006e-07, |
|
"loss": 1.1214, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.45136186770428, |
|
"grad_norm": 0.6283006226852244, |
|
"learning_rate": 8.098591549295774e-07, |
|
"loss": 1.0992, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.4591439688715953, |
|
"grad_norm": 0.5550197869791744, |
|
"learning_rate": 8.066429418742585e-07, |
|
"loss": 1.1136, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.4669260700389106, |
|
"grad_norm": 0.5659938483591545, |
|
"learning_rate": 8.03357314148681e-07, |
|
"loss": 1.0884, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.4747081712062258, |
|
"grad_norm": 0.5872567653519262, |
|
"learning_rate": 8e-07, |
|
"loss": 1.1283, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.482490272373541, |
|
"grad_norm": 0.6010641188429918, |
|
"learning_rate": 7.965686274509804e-07, |
|
"loss": 1.0959, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.490272373540856, |
|
"grad_norm": 0.559777631958916, |
|
"learning_rate": 7.930607187112763e-07, |
|
"loss": 1.1047, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.490272373540856, |
|
"eval_loss": 0.949611485004425, |
|
"eval_runtime": 193.3019, |
|
"eval_samples_per_second": 13.947, |
|
"eval_steps_per_second": 0.222, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.4980544747081712, |
|
"grad_norm": 0.5608029800864122, |
|
"learning_rate": 7.894736842105262e-07, |
|
"loss": 1.0798, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.5058365758754864, |
|
"grad_norm": 0.5765576143527928, |
|
"learning_rate": 7.85804816223067e-07, |
|
"loss": 1.0887, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.5136186770428015, |
|
"grad_norm": 0.5783903086304087, |
|
"learning_rate": 7.82051282051282e-07, |
|
"loss": 1.1015, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.5214007782101167, |
|
"grad_norm": 0.5252594928277367, |
|
"learning_rate": 7.782101167315173e-07, |
|
"loss": 1.0802, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.5291828793774318, |
|
"grad_norm": 0.5435740719293682, |
|
"learning_rate": 7.742782152230972e-07, |
|
"loss": 1.0874, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.536964980544747, |
|
"grad_norm": 0.5369255828107669, |
|
"learning_rate": 7.702523240371846e-07, |
|
"loss": 1.095, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.544747081712062, |
|
"grad_norm": 0.6850191931654889, |
|
"learning_rate": 7.661290322580645e-07, |
|
"loss": 1.0912, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.5525291828793775, |
|
"grad_norm": 0.5688231956299873, |
|
"learning_rate": 7.619047619047617e-07, |
|
"loss": 1.0552, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.5603112840466926, |
|
"grad_norm": 0.5928997625619618, |
|
"learning_rate": 7.575757575757575e-07, |
|
"loss": 1.0621, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.5680933852140078, |
|
"grad_norm": 0.7716125599428396, |
|
"learning_rate": 7.531380753138075e-07, |
|
"loss": 1.0758, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.575875486381323, |
|
"grad_norm": 0.5629855625488145, |
|
"learning_rate": 7.48587570621469e-07, |
|
"loss": 1.0606, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.5836575875486383, |
|
"grad_norm": 0.5635371246521241, |
|
"learning_rate": 7.439198855507868e-07, |
|
"loss": 1.1029, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.5914396887159534, |
|
"grad_norm": 0.6498217881509594, |
|
"learning_rate": 7.391304347826086e-07, |
|
"loss": 1.0737, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.5992217898832686, |
|
"grad_norm": 0.5578772825539674, |
|
"learning_rate": 7.342143906020557e-07, |
|
"loss": 1.0938, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.6070038910505837, |
|
"grad_norm": 0.6895801155757268, |
|
"learning_rate": 7.291666666666667e-07, |
|
"loss": 1.108, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.6147859922178989, |
|
"grad_norm": 0.5472071760512172, |
|
"learning_rate": 7.239819004524887e-07, |
|
"loss": 1.0799, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.622568093385214, |
|
"grad_norm": 0.5551855311336273, |
|
"learning_rate": 7.186544342507645e-07, |
|
"loss": 1.1011, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.6303501945525292, |
|
"grad_norm": 0.5922020624280246, |
|
"learning_rate": 7.131782945736434e-07, |
|
"loss": 1.0883, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.6381322957198443, |
|
"grad_norm": 0.5855500909084618, |
|
"learning_rate": 7.075471698113208e-07, |
|
"loss": 1.0277, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.6459143968871595, |
|
"grad_norm": 0.5436280978302126, |
|
"learning_rate": 7.017543859649121e-07, |
|
"loss": 1.0829, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.6536964980544746, |
|
"grad_norm": 0.568803772556928, |
|
"learning_rate": 6.957928802588997e-07, |
|
"loss": 1.0778, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.6614785992217898, |
|
"grad_norm": 0.5954446162983249, |
|
"learning_rate": 6.89655172413793e-07, |
|
"loss": 1.0885, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.669260700389105, |
|
"grad_norm": 0.6318388963180205, |
|
"learning_rate": 6.833333333333333e-07, |
|
"loss": 1.0761, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.6770428015564203, |
|
"grad_norm": 1.0424216403770146, |
|
"learning_rate": 6.768189509306259e-07, |
|
"loss": 1.0882, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.6848249027237354, |
|
"grad_norm": 0.5848369566451875, |
|
"learning_rate": 6.701030927835052e-07, |
|
"loss": 1.0933, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.6926070038910506, |
|
"grad_norm": 0.6540113258331898, |
|
"learning_rate": 6.63176265270506e-07, |
|
"loss": 1.0997, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.7003891050583657, |
|
"grad_norm": 0.5945247307598128, |
|
"learning_rate": 6.560283687943263e-07, |
|
"loss": 1.1121, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.708171206225681, |
|
"grad_norm": 0.5812576783107466, |
|
"learning_rate": 6.486486486486486e-07, |
|
"loss": 1.1089, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.7159533073929962, |
|
"grad_norm": 0.5422687962188623, |
|
"learning_rate": 6.41025641025641e-07, |
|
"loss": 1.0689, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.7237354085603114, |
|
"grad_norm": 0.5655113478395188, |
|
"learning_rate": 6.33147113594041e-07, |
|
"loss": 1.1083, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.7315175097276265, |
|
"grad_norm": 0.5479997273898842, |
|
"learning_rate": 6.249999999999999e-07, |
|
"loss": 1.0495, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.7392996108949417, |
|
"grad_norm": 0.6082276343842562, |
|
"learning_rate": 6.165703275529864e-07, |
|
"loss": 1.0847, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.7392996108949417, |
|
"eval_loss": 0.9448344707489014, |
|
"eval_runtime": 193.8348, |
|
"eval_samples_per_second": 13.909, |
|
"eval_steps_per_second": 0.222, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.7470817120622568, |
|
"grad_norm": 0.5456728126246078, |
|
"learning_rate": 6.078431372549019e-07, |
|
"loss": 1.0543, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.754863813229572, |
|
"grad_norm": 0.8395615938618125, |
|
"learning_rate": 5.988023952095807e-07, |
|
"loss": 1.1133, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.7626459143968871, |
|
"grad_norm": 0.5699433030581755, |
|
"learning_rate": 5.89430894308943e-07, |
|
"loss": 1.0878, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.7704280155642023, |
|
"grad_norm": 0.5372956205386954, |
|
"learning_rate": 5.797101449275362e-07, |
|
"loss": 1.0749, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.7782101167315174, |
|
"grad_norm": 0.5485335276065648, |
|
"learning_rate": 5.696202531645569e-07, |
|
"loss": 1.056, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.7859922178988326, |
|
"grad_norm": 0.5739239028380502, |
|
"learning_rate": 5.591397849462365e-07, |
|
"loss": 1.1175, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.7937743190661477, |
|
"grad_norm": 0.5674136555108149, |
|
"learning_rate": 5.482456140350876e-07, |
|
"loss": 1.1061, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.801556420233463, |
|
"grad_norm": 0.6414824329123738, |
|
"learning_rate": 5.369127516778523e-07, |
|
"loss": 1.1502, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.8093385214007782, |
|
"grad_norm": 0.5567692960590215, |
|
"learning_rate": 5.251141552511415e-07, |
|
"loss": 1.074, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.8171206225680934, |
|
"grad_norm": 0.5203307823955872, |
|
"learning_rate": 5.128205128205127e-07, |
|
"loss": 1.0576, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.8249027237354085, |
|
"grad_norm": 0.5480039922419879, |
|
"learning_rate": 5e-07, |
|
"loss": 1.0692, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.8326848249027239, |
|
"grad_norm": 0.6602968949965664, |
|
"learning_rate": 4.8661800486618e-07, |
|
"loss": 1.0716, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.840466926070039, |
|
"grad_norm": 0.5621257570421472, |
|
"learning_rate": 4.72636815920398e-07, |
|
"loss": 1.1222, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.8482490272373542, |
|
"grad_norm": 0.5778853314353605, |
|
"learning_rate": 4.5801526717557246e-07, |
|
"loss": 1.0651, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.8560311284046693, |
|
"grad_norm": 0.5284341924121511, |
|
"learning_rate": 4.4270833333333337e-07, |
|
"loss": 1.0708, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.8638132295719845, |
|
"grad_norm": 0.5965113916130589, |
|
"learning_rate": 4.266666666666667e-07, |
|
"loss": 1.0981, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.8715953307392996, |
|
"grad_norm": 0.5597034969209712, |
|
"learning_rate": 4.098360655737704e-07, |
|
"loss": 1.1179, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.8793774319066148, |
|
"grad_norm": 0.5536241929831267, |
|
"learning_rate": 3.92156862745098e-07, |
|
"loss": 1.0935, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.88715953307393, |
|
"grad_norm": 0.5584595178829068, |
|
"learning_rate": 3.7356321839080463e-07, |
|
"loss": 1.1041, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.894941634241245, |
|
"grad_norm": 0.5427158370327506, |
|
"learning_rate": 3.539823008849558e-07, |
|
"loss": 1.0756, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.9027237354085602, |
|
"grad_norm": 0.5336572931407305, |
|
"learning_rate": 3.333333333333333e-07, |
|
"loss": 1.0514, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.9105058365758754, |
|
"grad_norm": 0.5369948018962315, |
|
"learning_rate": 3.1152647975077885e-07, |
|
"loss": 1.1033, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.9182879377431905, |
|
"grad_norm": 0.5717043508183633, |
|
"learning_rate": 2.8846153846153846e-07, |
|
"loss": 1.0766, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.9260700389105059, |
|
"grad_norm": 0.5722908601877614, |
|
"learning_rate": 2.64026402640264e-07, |
|
"loss": 1.13, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.933852140077821, |
|
"grad_norm": 0.5419564077709611, |
|
"learning_rate": 2.3809523809523806e-07, |
|
"loss": 1.0729, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.9416342412451362, |
|
"grad_norm": 0.5886685089221407, |
|
"learning_rate": 2.1052631578947366e-07, |
|
"loss": 1.1071, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.9494163424124513, |
|
"grad_norm": 0.5403936814687964, |
|
"learning_rate": 1.8115942028985505e-07, |
|
"loss": 1.0983, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.9571984435797667, |
|
"grad_norm": 0.528388946685679, |
|
"learning_rate": 1.4981273408239696e-07, |
|
"loss": 1.1008, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.9649805447470818, |
|
"grad_norm": 0.5354770235277335, |
|
"learning_rate": 1.1627906976744186e-07, |
|
"loss": 1.1341, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.972762645914397, |
|
"grad_norm": 0.5252514289927873, |
|
"learning_rate": 8.032128514056224e-08, |
|
"loss": 1.0818, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.9805447470817121, |
|
"grad_norm": 0.5858247065123809, |
|
"learning_rate": 4.166666666666666e-08, |
|
"loss": 1.0804, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.9883268482490273, |
|
"grad_norm": 0.5257477128880242, |
|
"learning_rate": 0, |
|
"loss": 1.0901, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.9883268482490273, |
|
"eval_loss": 0.9421924352645874, |
|
"eval_runtime": 193.7536, |
|
"eval_samples_per_second": 13.915, |
|
"eval_steps_per_second": 0.222, |
|
"step": 256 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 256, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 64, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.554618867587219e+18, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |