{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 19.958677685950413, |
|
"eval_steps": 500, |
|
"global_step": 2415, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.08264462809917356, |
|
"grad_norm": 4.703310012817383, |
|
"learning_rate": 1.652892561983471e-05, |
|
"loss": 0.9413, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.1652892561983471, |
|
"grad_norm": 2.9132559299468994, |
|
"learning_rate": 3.305785123966942e-05, |
|
"loss": 0.4897, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.24793388429752067, |
|
"grad_norm": 1.5209449529647827, |
|
"learning_rate": 4.958677685950414e-05, |
|
"loss": 0.2702, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.3305785123966942, |
|
"grad_norm": 0.6627697944641113, |
|
"learning_rate": 6.611570247933885e-05, |
|
"loss": 0.1735, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.4132231404958678, |
|
"grad_norm": 1.2193719148635864, |
|
"learning_rate": 8.264462809917356e-05, |
|
"loss": 0.138, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.49586776859504134, |
|
"grad_norm": 1.7384006977081299, |
|
"learning_rate": 9.917355371900827e-05, |
|
"loss": 0.1263, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.5785123966942148, |
|
"grad_norm": 1.2912031412124634, |
|
"learning_rate": 0.00011570247933884298, |
|
"loss": 0.1205, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.6611570247933884, |
|
"grad_norm": 0.9840922355651855, |
|
"learning_rate": 0.0001322314049586777, |
|
"loss": 0.1048, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.743801652892562, |
|
"grad_norm": 0.4457073509693146, |
|
"learning_rate": 0.0001487603305785124, |
|
"loss": 0.0894, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.8264462809917356, |
|
"grad_norm": 0.7870272994041443, |
|
"learning_rate": 0.00016528925619834712, |
|
"loss": 0.075, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.5919432640075684, |
|
"learning_rate": 0.00018181818181818183, |
|
"loss": 0.0839, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.9917355371900827, |
|
"grad_norm": 0.5343506932258606, |
|
"learning_rate": 0.00019834710743801655, |
|
"loss": 0.0772, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.0743801652892562, |
|
"grad_norm": 0.6658118963241577, |
|
"learning_rate": 0.00019999240439374418, |
|
"loss": 0.0697, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.1570247933884297, |
|
"grad_norm": 0.8337640762329102, |
|
"learning_rate": 0.0001999661494584419, |
|
"loss": 0.067, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.2396694214876034, |
|
"grad_norm": 0.4294738471508026, |
|
"learning_rate": 0.00019992114634399312, |
|
"loss": 0.07, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.322314049586777, |
|
"grad_norm": 0.5833399891853333, |
|
"learning_rate": 0.00019985740349051935, |
|
"loss": 0.0684, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.4049586776859504, |
|
"grad_norm": 0.5372803211212158, |
|
"learning_rate": 0.00019977493285269171, |
|
"loss": 0.0576, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.487603305785124, |
|
"grad_norm": 0.4818974435329437, |
|
"learning_rate": 0.00019967374989748876, |
|
"loss": 0.0519, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.5702479338842976, |
|
"grad_norm": 0.4126225709915161, |
|
"learning_rate": 0.0001995538736012958, |
|
"loss": 0.0542, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.6528925619834711, |
|
"grad_norm": 0.4318453073501587, |
|
"learning_rate": 0.00019941532644634596, |
|
"loss": 0.0516, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.7355371900826446, |
|
"grad_norm": 0.6815105676651001, |
|
"learning_rate": 0.0001992581344165037, |
|
"loss": 0.0559, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.8181818181818183, |
|
"grad_norm": 0.5503532290458679, |
|
"learning_rate": 0.00019908232699239182, |
|
"loss": 0.0531, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.9008264462809916, |
|
"grad_norm": 0.5689036846160889, |
|
"learning_rate": 0.0001988879371458622, |
|
"loss": 0.0528, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.9834710743801653, |
|
"grad_norm": 0.5625288486480713, |
|
"learning_rate": 0.00019867500133381242, |
|
"loss": 0.0541, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.0661157024793386, |
|
"grad_norm": 0.4508824646472931, |
|
"learning_rate": 0.00019844355949134824, |
|
"loss": 0.0472, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.1487603305785123, |
|
"grad_norm": 0.5423370599746704, |
|
"learning_rate": 0.000198193655024294, |
|
"loss": 0.0455, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.231404958677686, |
|
"grad_norm": 0.5978049039840698, |
|
"learning_rate": 0.00019792533480105203, |
|
"loss": 0.0453, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.3140495867768593, |
|
"grad_norm": 0.301248162984848, |
|
"learning_rate": 0.0001976386491438129, |
|
"loss": 0.0401, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.396694214876033, |
|
"grad_norm": 0.2878156006336212, |
|
"learning_rate": 0.00019733365181911748, |
|
"loss": 0.041, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.479338842975207, |
|
"grad_norm": 0.3870035707950592, |
|
"learning_rate": 0.00019701040002777332, |
|
"loss": 0.0434, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.56198347107438, |
|
"grad_norm": 0.5360531210899353, |
|
"learning_rate": 0.000196668954394127, |
|
"loss": 0.0496, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.644628099173554, |
|
"grad_norm": 0.5107744336128235, |
|
"learning_rate": 0.0001963093789546944, |
|
"loss": 0.0418, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.7272727272727275, |
|
"grad_norm": 0.46189603209495544, |
|
"learning_rate": 0.00019593174114615068, |
|
"loss": 0.0388, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 2.809917355371901, |
|
"grad_norm": 0.3216039836406708, |
|
"learning_rate": 0.0001955361117926831, |
|
"loss": 0.0443, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 2.8925619834710745, |
|
"grad_norm": 0.5578595399856567, |
|
"learning_rate": 0.00019512256509270822, |
|
"loss": 0.0443, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.975206611570248, |
|
"grad_norm": 0.33241087198257446, |
|
"learning_rate": 0.0001946911786049562, |
|
"loss": 0.0446, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.0578512396694215, |
|
"grad_norm": 0.3146415948867798, |
|
"learning_rate": 0.0001942420332339252, |
|
"loss": 0.0388, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.1404958677685952, |
|
"grad_norm": 0.30948349833488464, |
|
"learning_rate": 0.00019377521321470805, |
|
"loss": 0.0349, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.2231404958677685, |
|
"grad_norm": 0.3389824330806732, |
|
"learning_rate": 0.00019329080609719435, |
|
"loss": 0.0387, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.3057851239669422, |
|
"grad_norm": 0.27903950214385986, |
|
"learning_rate": 0.00019278890272965096, |
|
"loss": 0.0365, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.3884297520661155, |
|
"grad_norm": 0.3887731730937958, |
|
"learning_rate": 0.00019226959724168367, |
|
"loss": 0.0398, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.4710743801652892, |
|
"grad_norm": 0.23304741084575653, |
|
"learning_rate": 0.0001917329870265839, |
|
"loss": 0.035, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.553719008264463, |
|
"grad_norm": 0.4025675654411316, |
|
"learning_rate": 0.00019117917272306283, |
|
"loss": 0.0373, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.6363636363636362, |
|
"grad_norm": 0.4401100277900696, |
|
"learning_rate": 0.00019060825819637716, |
|
"loss": 0.0374, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.71900826446281, |
|
"grad_norm": 0.3880658447742462, |
|
"learning_rate": 0.00019002035051884973, |
|
"loss": 0.0396, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 3.8016528925619832, |
|
"grad_norm": 0.3345029950141907, |
|
"learning_rate": 0.00018941555994978848, |
|
"loss": 0.0332, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 3.884297520661157, |
|
"grad_norm": 0.297284334897995, |
|
"learning_rate": 0.00018879399991480792, |
|
"loss": 0.0321, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 3.9669421487603307, |
|
"grad_norm": 0.27577996253967285, |
|
"learning_rate": 0.00018815578698455675, |
|
"loss": 0.0344, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.049586776859504, |
|
"grad_norm": 0.2191019505262375, |
|
"learning_rate": 0.00018750104085285546, |
|
"loss": 0.0308, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.132231404958677, |
|
"grad_norm": 0.2145395576953888, |
|
"learning_rate": 0.0001868298843142484, |
|
"loss": 0.0319, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.214876033057851, |
|
"grad_norm": 0.2143179327249527, |
|
"learning_rate": 0.0001861424432409742, |
|
"loss": 0.031, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 4.297520661157025, |
|
"grad_norm": 0.6055622100830078, |
|
"learning_rate": 0.0001854388465593591, |
|
"loss": 0.0355, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 4.380165289256198, |
|
"grad_norm": 0.5569704174995422, |
|
"learning_rate": 0.00018471922622563728, |
|
"loss": 0.0319, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 4.462809917355372, |
|
"grad_norm": 0.2579587399959564, |
|
"learning_rate": 0.0001839837172012033, |
|
"loss": 0.0331, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 4.545454545454545, |
|
"grad_norm": 0.22480279207229614, |
|
"learning_rate": 0.00018323245742730054, |
|
"loss": 0.0306, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 4.628099173553719, |
|
"grad_norm": 0.260975182056427, |
|
"learning_rate": 0.00018246558779915105, |
|
"loss": 0.0342, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 4.710743801652892, |
|
"grad_norm": 0.35224688053131104, |
|
"learning_rate": 0.0001816832521395313, |
|
"loss": 0.0407, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 4.793388429752066, |
|
"grad_norm": 0.265455961227417, |
|
"learning_rate": 0.000180885597171799, |
|
"loss": 0.0332, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 4.87603305785124, |
|
"grad_norm": 0.2791174352169037, |
|
"learning_rate": 0.00018007277249237563, |
|
"loss": 0.0342, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 4.958677685950414, |
|
"grad_norm": 0.39626914262771606, |
|
"learning_rate": 0.00017924493054269046, |
|
"loss": 0.0342, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 5.041322314049586, |
|
"grad_norm": 0.3018089532852173, |
|
"learning_rate": 0.00017840222658059096, |
|
"loss": 0.0306, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 5.12396694214876, |
|
"grad_norm": 0.4229370653629303, |
|
"learning_rate": 0.00017754481865122474, |
|
"loss": 0.0293, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 5.206611570247934, |
|
"grad_norm": 0.35095998644828796, |
|
"learning_rate": 0.00017667286755739895, |
|
"loss": 0.0334, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 5.289256198347108, |
|
"grad_norm": 0.3846903145313263, |
|
"learning_rate": 0.00017578653682942275, |
|
"loss": 0.0346, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 5.371900826446281, |
|
"grad_norm": 0.3931705057621002, |
|
"learning_rate": 0.00017488599269443753, |
|
"loss": 0.0363, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 5.454545454545454, |
|
"grad_norm": 0.39143308997154236, |
|
"learning_rate": 0.0001739714040452422, |
|
"loss": 0.0274, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 5.537190082644628, |
|
"grad_norm": 0.26642906665802, |
|
"learning_rate": 0.0001730429424086179, |
|
"loss": 0.0294, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 5.619834710743802, |
|
"grad_norm": 0.27915745973587036, |
|
"learning_rate": 0.00017210078191315904, |
|
"loss": 0.0275, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 5.702479338842975, |
|
"grad_norm": 0.3474787771701813, |
|
"learning_rate": 0.0001711450992566163, |
|
"loss": 0.0249, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 5.785123966942149, |
|
"grad_norm": 0.4248145818710327, |
|
"learning_rate": 0.00017017607367275777, |
|
"loss": 0.0266, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 5.867768595041323, |
|
"grad_norm": 0.35364291071891785, |
|
"learning_rate": 0.00016919388689775464, |
|
"loss": 0.0295, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 5.950413223140496, |
|
"grad_norm": 0.32174554467201233, |
|
"learning_rate": 0.00016819872313609716, |
|
"loss": 0.033, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 6.033057851239669, |
|
"grad_norm": 0.3670883774757385, |
|
"learning_rate": 0.00016719076902604836, |
|
"loss": 0.0325, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 6.115702479338843, |
|
"grad_norm": 0.2812848687171936, |
|
"learning_rate": 0.00016617021360464045, |
|
"loss": 0.0324, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 6.198347107438017, |
|
"grad_norm": 0.2678919732570648, |
|
"learning_rate": 0.00016513724827222227, |
|
"loss": 0.0258, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 6.2809917355371905, |
|
"grad_norm": 0.33413299918174744, |
|
"learning_rate": 0.00016409206675656264, |
|
"loss": 0.0295, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 6.363636363636363, |
|
"grad_norm": 0.31874868273735046, |
|
"learning_rate": 0.00016303486507651807, |
|
"loss": 0.0267, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 6.446280991735537, |
|
"grad_norm": 0.2576686441898346, |
|
"learning_rate": 0.00016196584150527, |
|
"loss": 0.033, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 6.528925619834711, |
|
"grad_norm": 0.16514086723327637, |
|
"learning_rate": 0.0001608851965331397, |
|
"loss": 0.0305, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 6.6115702479338845, |
|
"grad_norm": 0.2784286141395569, |
|
"learning_rate": 0.0001597931328299874, |
|
"loss": 0.0288, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 6.694214876033058, |
|
"grad_norm": 0.21867933869361877, |
|
"learning_rate": 0.0001586898552072023, |
|
"loss": 0.0257, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 6.776859504132231, |
|
"grad_norm": 0.13699640333652496, |
|
"learning_rate": 0.00015757557057929127, |
|
"loss": 0.0243, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 6.859504132231405, |
|
"grad_norm": 0.2195553332567215, |
|
"learning_rate": 0.00015645048792507297, |
|
"loss": 0.0298, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 6.9421487603305785, |
|
"grad_norm": 0.21387651562690735, |
|
"learning_rate": 0.00015531481824848504, |
|
"loss": 0.0285, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 7.024793388429752, |
|
"grad_norm": 0.1597180813550949, |
|
"learning_rate": 0.00015416877453901096, |
|
"loss": 0.0259, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 7.107438016528926, |
|
"grad_norm": 0.2567303776741028, |
|
"learning_rate": 0.00015301257173173523, |
|
"loss": 0.0258, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 7.190082644628099, |
|
"grad_norm": 0.18878251314163208, |
|
"learning_rate": 0.00015184642666703308, |
|
"loss": 0.0293, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 7.2727272727272725, |
|
"grad_norm": 0.24876275658607483, |
|
"learning_rate": 0.00015067055804990318, |
|
"loss": 0.0283, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 7.355371900826446, |
|
"grad_norm": 0.29571253061294556, |
|
"learning_rate": 0.0001494851864089505, |
|
"loss": 0.0288, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 7.43801652892562, |
|
"grad_norm": 0.2919931709766388, |
|
"learning_rate": 0.00014829053405502723, |
|
"loss": 0.0282, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 7.520661157024794, |
|
"grad_norm": 0.25039032101631165, |
|
"learning_rate": 0.00014708682503953956, |
|
"loss": 0.0258, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 7.6033057851239665, |
|
"grad_norm": 0.2013220340013504, |
|
"learning_rate": 0.00014587428511242766, |
|
"loss": 0.0247, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 7.68595041322314, |
|
"grad_norm": 0.3657705783843994, |
|
"learning_rate": 0.0001446531416798275, |
|
"loss": 0.0287, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 7.768595041322314, |
|
"grad_norm": 0.36600229144096375, |
|
"learning_rate": 0.00014342362376142185, |
|
"loss": 0.0271, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 7.851239669421488, |
|
"grad_norm": 0.20832963287830353, |
|
"learning_rate": 0.00014218596194748875, |
|
"loss": 0.0253, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 7.933884297520661, |
|
"grad_norm": 0.15197999775409698, |
|
"learning_rate": 0.0001409403883556554, |
|
"loss": 0.0271, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 8.016528925619834, |
|
"grad_norm": 0.16558091342449188, |
|
"learning_rate": 0.00013968713658736552, |
|
"loss": 0.0265, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 8.099173553719009, |
|
"grad_norm": 0.1627083718776703, |
|
"learning_rate": 0.00013842644168406864, |
|
"loss": 0.0234, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 8.181818181818182, |
|
"grad_norm": 0.33982279896736145, |
|
"learning_rate": 0.000137158540083139, |
|
"loss": 0.025, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 8.264462809917354, |
|
"grad_norm": 0.415449857711792, |
|
"learning_rate": 0.000135883669573533, |
|
"loss": 0.0279, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 8.347107438016529, |
|
"grad_norm": 0.3493754267692566, |
|
"learning_rate": 0.00013460206925119297, |
|
"loss": 0.0242, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 8.429752066115702, |
|
"grad_norm": 0.1715659201145172, |
|
"learning_rate": 0.00013331397947420576, |
|
"loss": 0.031, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 8.512396694214877, |
|
"grad_norm": 0.27542078495025635, |
|
"learning_rate": 0.00013201964181772485, |
|
"loss": 0.0247, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 8.59504132231405, |
|
"grad_norm": 0.25437498092651367, |
|
"learning_rate": 0.000130719299028664, |
|
"loss": 0.0239, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 8.677685950413224, |
|
"grad_norm": 0.15187709033489227, |
|
"learning_rate": 0.00012941319498017116, |
|
"loss": 0.0219, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 8.760330578512397, |
|
"grad_norm": 0.2681483328342438, |
|
"learning_rate": 0.00012810157462589137, |
|
"loss": 0.0229, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 8.84297520661157, |
|
"grad_norm": 0.1943976879119873, |
|
"learning_rate": 0.00012678468395402662, |
|
"loss": 0.0224, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 8.925619834710744, |
|
"grad_norm": 0.25881528854370117, |
|
"learning_rate": 0.00012546276994120228, |
|
"loss": 0.026, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 9.008264462809917, |
|
"grad_norm": 0.21106192469596863, |
|
"learning_rate": 0.0001241360805061474, |
|
"loss": 0.0208, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 9.090909090909092, |
|
"grad_norm": 0.2531886398792267, |
|
"learning_rate": 0.00012280486446319895, |
|
"loss": 0.0257, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 9.173553719008265, |
|
"grad_norm": 0.22699125111103058, |
|
"learning_rate": 0.00012146937147563793, |
|
"loss": 0.0172, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 9.256198347107437, |
|
"grad_norm": 0.1889602094888687, |
|
"learning_rate": 0.00012012985200886602, |
|
"loss": 0.022, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 9.338842975206612, |
|
"grad_norm": 0.24796439707279205, |
|
"learning_rate": 0.00011878655728343204, |
|
"loss": 0.0217, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 9.421487603305785, |
|
"grad_norm": 0.2704314589500427, |
|
"learning_rate": 0.00011743973922791678, |
|
"loss": 0.0229, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 9.50413223140496, |
|
"grad_norm": 0.26962810754776, |
|
"learning_rate": 0.00011608965043168496, |
|
"loss": 0.0201, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 9.586776859504132, |
|
"grad_norm": 0.1735532283782959, |
|
"learning_rate": 0.0001147365440975132, |
|
"loss": 0.0215, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 9.669421487603305, |
|
"grad_norm": 0.15696899592876434, |
|
"learning_rate": 0.00011338067399410319, |
|
"loss": 0.0241, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 9.75206611570248, |
|
"grad_norm": 0.23969663679599762, |
|
"learning_rate": 0.00011202229440848844, |
|
"loss": 0.0238, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 9.834710743801653, |
|
"grad_norm": 0.23791910707950592, |
|
"learning_rate": 0.000110661660098344, |
|
"loss": 0.0207, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 9.917355371900827, |
|
"grad_norm": 0.24661310017108917, |
|
"learning_rate": 0.00010929902624420787, |
|
"loss": 0.025, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.3029937446117401, |
|
"learning_rate": 0.00010793464840162302, |
|
"loss": 0.0198, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 10.082644628099173, |
|
"grad_norm": 0.22985076904296875, |
|
"learning_rate": 0.00010656878245320933, |
|
"loss": 0.0186, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 10.165289256198347, |
|
"grad_norm": 0.23791086673736572, |
|
"learning_rate": 0.00010520168456067381, |
|
"loss": 0.0218, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 10.24793388429752, |
|
"grad_norm": 0.23052431643009186, |
|
"learning_rate": 0.0001038336111167688, |
|
"loss": 0.0207, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 10.330578512396695, |
|
"grad_norm": 0.18408218026161194, |
|
"learning_rate": 0.00010246481869720665, |
|
"loss": 0.0189, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 10.413223140495868, |
|
"grad_norm": 0.1350499838590622, |
|
"learning_rate": 0.00010109556401254032, |
|
"loss": 0.0214, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 10.49586776859504, |
|
"grad_norm": 0.21356913447380066, |
|
"learning_rate": 9.972610386001824e-05, |
|
"loss": 0.0204, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 10.578512396694215, |
|
"grad_norm": 0.19436530768871307, |
|
"learning_rate": 9.83566950754235e-05, |
|
"loss": 0.0234, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 10.661157024793388, |
|
"grad_norm": 0.22124770283699036, |
|
"learning_rate": 9.698759448490528e-05, |
|
"loss": 0.0204, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 10.743801652892563, |
|
"grad_norm": 0.23618550598621368, |
|
"learning_rate": 9.561905885681252e-05, |
|
"loss": 0.02, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 10.826446280991735, |
|
"grad_norm": 0.22095705568790436, |
|
"learning_rate": 9.425134485353804e-05, |
|
"loss": 0.0191, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 10.909090909090908, |
|
"grad_norm": 0.1535860002040863, |
|
"learning_rate": 9.288470898338283e-05, |
|
"loss": 0.0205, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 10.991735537190083, |
|
"grad_norm": 0.23291295766830444, |
|
"learning_rate": 9.151940755244912e-05, |
|
"loss": 0.0228, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 11.074380165289256, |
|
"grad_norm": 0.22103387117385864, |
|
"learning_rate": 9.01556966165713e-05, |
|
"loss": 0.021, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 11.15702479338843, |
|
"grad_norm": 0.2173365205526352, |
|
"learning_rate": 8.879383193329409e-05, |
|
"loss": 0.0224, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 11.239669421487603, |
|
"grad_norm": 0.18998365104198456, |
|
"learning_rate": 8.743406891390607e-05, |
|
"loss": 0.0191, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 11.322314049586776, |
|
"grad_norm": 0.22178007662296295, |
|
"learning_rate": 8.607666257553881e-05, |
|
"loss": 0.0182, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 11.40495867768595, |
|
"grad_norm": 0.17217198014259338, |
|
"learning_rate": 8.472186749333941e-05, |
|
"loss": 0.017, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 11.487603305785123, |
|
"grad_norm": 0.2341386377811432, |
|
"learning_rate": 8.336993775272624e-05, |
|
"loss": 0.017, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 11.570247933884298, |
|
"grad_norm": 0.2643473744392395, |
|
"learning_rate": 8.202112690173643e-05, |
|
"loss": 0.0182, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 11.652892561983471, |
|
"grad_norm": 0.20025375485420227, |
|
"learning_rate": 8.067568790347394e-05, |
|
"loss": 0.0184, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 11.735537190082646, |
|
"grad_norm": 0.3023671507835388, |
|
"learning_rate": 7.933387308866793e-05, |
|
"loss": 0.0196, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 11.818181818181818, |
|
"grad_norm": 0.3398241400718689, |
|
"learning_rate": 7.799593410834882e-05, |
|
"loss": 0.0219, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 11.900826446280991, |
|
"grad_norm": 0.26556509733200073, |
|
"learning_rate": 7.666212188665278e-05, |
|
"loss": 0.0167, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 11.983471074380166, |
|
"grad_norm": 0.16627757251262665, |
|
"learning_rate": 7.533268657376175e-05, |
|
"loss": 0.0173, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 12.066115702479339, |
|
"grad_norm": 0.1822843700647354, |
|
"learning_rate": 7.400787749898919e-05, |
|
"loss": 0.0211, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 12.148760330578513, |
|
"grad_norm": 0.20193220674991608, |
|
"learning_rate": 7.268794312401934e-05, |
|
"loss": 0.0158, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 12.231404958677686, |
|
"grad_norm": 0.16035108268260956, |
|
"learning_rate": 7.137313099630965e-05, |
|
"loss": 0.0181, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 12.314049586776859, |
|
"grad_norm": 0.14358443021774292, |
|
"learning_rate": 7.006368770266421e-05, |
|
"loss": 0.0176, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 12.396694214876034, |
|
"grad_norm": 0.2652245759963989, |
|
"learning_rate": 6.875985882298767e-05, |
|
"loss": 0.0151, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 12.479338842975206, |
|
"grad_norm": 0.16228072345256805, |
|
"learning_rate": 6.746188888422788e-05, |
|
"loss": 0.0189, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 12.561983471074381, |
|
"grad_norm": 0.2025652527809143, |
|
"learning_rate": 6.617002131451595e-05, |
|
"loss": 0.0175, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 12.644628099173554, |
|
"grad_norm": 0.19407929480075836, |
|
"learning_rate": 6.488449839751248e-05, |
|
"loss": 0.0195, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 12.727272727272727, |
|
"grad_norm": 0.19878722727298737, |
|
"learning_rate": 6.360556122696847e-05, |
|
"loss": 0.0179, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 12.809917355371901, |
|
"grad_norm": 0.14901714026927948, |
|
"learning_rate": 6.233344966150931e-05, |
|
"loss": 0.0174, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 12.892561983471074, |
|
"grad_norm": 0.19965632259845734, |
|
"learning_rate": 6.106840227965031e-05, |
|
"loss": 0.0202, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 12.975206611570249, |
|
"grad_norm": 0.19168788194656372, |
|
"learning_rate": 5.98106563350526e-05, |
|
"loss": 0.0184, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 13.057851239669422, |
|
"grad_norm": 0.21455532312393188, |
|
"learning_rate": 5.8560447712027176e-05, |
|
"loss": 0.0185, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 13.140495867768594, |
|
"grad_norm": 0.14643830060958862, |
|
"learning_rate": 5.7318010881296e-05, |
|
"loss": 0.0156, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 13.223140495867769, |
|
"grad_norm": 0.15653955936431885, |
|
"learning_rate": 5.6083578856018116e-05, |
|
"loss": 0.0172, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 13.305785123966942, |
|
"grad_norm": 0.19662214815616608, |
|
"learning_rate": 5.485738314808905e-05, |
|
"loss": 0.0176, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 13.388429752066116, |
|
"grad_norm": 0.17977331578731537, |
|
"learning_rate": 5.363965372472195e-05, |
|
"loss": 0.0149, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 13.47107438016529, |
|
"grad_norm": 0.16984397172927856, |
|
"learning_rate": 5.2430618965318356e-05, |
|
"loss": 0.0155, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 13.553719008264462, |
|
"grad_norm": 0.23604920506477356, |
|
"learning_rate": 5.123050561863657e-05, |
|
"loss": 0.0172, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 13.636363636363637, |
|
"grad_norm": 0.20416028797626495, |
|
"learning_rate": 5.0039538760266034e-05, |
|
"loss": 0.015, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 13.71900826446281, |
|
"grad_norm": 0.17371866106987, |
|
"learning_rate": 4.885794175041567e-05, |
|
"loss": 0.0145, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 13.801652892561984, |
|
"grad_norm": 0.18007588386535645, |
|
"learning_rate": 4.7685936192023415e-05, |
|
"loss": 0.0159, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 13.884297520661157, |
|
"grad_norm": 0.1721491813659668, |
|
"learning_rate": 4.652374188919584e-05, |
|
"loss": 0.0167, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 13.96694214876033, |
|
"grad_norm": 0.24759605526924133, |
|
"learning_rate": 4.537157680598486e-05, |
|
"loss": 0.0191, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 14.049586776859504, |
|
"grad_norm": 0.147015780210495, |
|
"learning_rate": 4.422965702550971e-05, |
|
"loss": 0.018, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 14.132231404958677, |
|
"grad_norm": 0.2071155458688736, |
|
"learning_rate": 4.309819670943139e-05, |
|
"loss": 0.0174, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 14.214876033057852, |
|
"grad_norm": 0.19617584347724915, |
|
"learning_rate": 4.197740805778779e-05, |
|
"loss": 0.0147, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 14.297520661157025, |
|
"grad_norm": 0.20283041894435883, |
|
"learning_rate": 4.086750126919655e-05, |
|
"loss": 0.0149, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 14.380165289256198, |
|
"grad_norm": 0.23828528821468353, |
|
"learning_rate": 3.976868450143329e-05, |
|
"loss": 0.0166, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 14.462809917355372, |
|
"grad_norm": 0.2454332709312439, |
|
"learning_rate": 3.868116383239259e-05, |
|
"loss": 0.016, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 14.545454545454545, |
|
"grad_norm": 0.21714092791080475, |
|
"learning_rate": 3.7605143221439066e-05, |
|
"loss": 0.0154, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 14.62809917355372, |
|
"grad_norm": 0.20886965095996857, |
|
"learning_rate": 3.6540824471155856e-05, |
|
"loss": 0.0176, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 14.710743801652892, |
|
"grad_norm": 0.20230558514595032, |
|
"learning_rate": 3.548840718949724e-05, |
|
"loss": 0.0164, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 14.793388429752067, |
|
"grad_norm": 0.18747635185718536, |
|
"learning_rate": 3.4448088752353434e-05, |
|
"loss": 0.0159, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 14.87603305785124, |
|
"grad_norm": 0.15160124003887177, |
|
"learning_rate": 3.3420064266533415e-05, |
|
"loss": 0.0153, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 14.958677685950413, |
|
"grad_norm": 0.21707528829574585, |
|
"learning_rate": 3.240452653317367e-05, |
|
"loss": 0.0163, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 15.041322314049587, |
|
"grad_norm": 0.2630583345890045, |
|
"learning_rate": 3.140166601157928e-05, |
|
"loss": 0.0153, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 15.12396694214876, |
|
"grad_norm": 0.2211904525756836, |
|
"learning_rate": 3.0411670783504053e-05, |
|
"loss": 0.0159, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 15.206611570247935, |
|
"grad_norm": 0.27094215154647827, |
|
"learning_rate": 2.94347265178769e-05, |
|
"loss": 0.0143, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 15.289256198347108, |
|
"grad_norm": 0.2170286327600479, |
|
"learning_rate": 2.847101643598028e-05, |
|
"loss": 0.0154, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 15.37190082644628, |
|
"grad_norm": 0.15928302705287933, |
|
"learning_rate": 2.7520721277088024e-05, |
|
"loss": 0.0162, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 15.454545454545455, |
|
"grad_norm": 0.20816448330879211, |
|
"learning_rate": 2.6584019264568495e-05, |
|
"loss": 0.0127, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 15.537190082644628, |
|
"grad_norm": 0.14063189923763275, |
|
"learning_rate": 2.5661086072459707e-05, |
|
"loss": 0.0168, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 15.619834710743802, |
|
"grad_norm": 0.2043224722146988, |
|
"learning_rate": 2.4752094792522372e-05, |
|
"loss": 0.0152, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 15.702479338842975, |
|
"grad_norm": 0.17056132853031158, |
|
"learning_rate": 2.385721590177754e-05, |
|
"loss": 0.0154, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 15.785123966942148, |
|
"grad_norm": 0.20709876716136932, |
|
"learning_rate": 2.2976617230534202e-05, |
|
"loss": 0.0132, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 15.867768595041323, |
|
"grad_norm": 0.14429618418216705, |
|
"learning_rate": 2.211046393091374e-05, |
|
"loss": 0.0165, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 15.950413223140496, |
|
"grad_norm": 0.15559087693691254, |
|
"learning_rate": 2.1258918445876263e-05, |
|
"loss": 0.0147, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 16.03305785123967, |
|
"grad_norm": 0.17711517214775085, |
|
"learning_rate": 2.0422140478755248e-05, |
|
"loss": 0.016, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 16.115702479338843, |
|
"grad_norm": 0.14507338404655457, |
|
"learning_rate": 1.9600286963305957e-05, |
|
"loss": 0.0126, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 16.198347107438018, |
|
"grad_norm": 0.12779353559017181, |
|
"learning_rate": 1.8793512034273242e-05, |
|
"loss": 0.0133, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 16.28099173553719, |
|
"grad_norm": 0.1657179296016693, |
|
"learning_rate": 1.8001966998484253e-05, |
|
"loss": 0.0134, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 16.363636363636363, |
|
"grad_norm": 0.17538750171661377, |
|
"learning_rate": 1.7225800306471696e-05, |
|
"loss": 0.0135, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 16.446280991735538, |
|
"grad_norm": 0.18412277102470398, |
|
"learning_rate": 1.6465157524632436e-05, |
|
"loss": 0.0138, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 16.52892561983471, |
|
"grad_norm": 0.152567520737648, |
|
"learning_rate": 1.5720181307927373e-05, |
|
"loss": 0.012, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 16.611570247933884, |
|
"grad_norm": 0.2471626251935959, |
|
"learning_rate": 1.4991011373127084e-05, |
|
"loss": 0.0149, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 16.694214876033058, |
|
"grad_norm": 0.22137291729450226, |
|
"learning_rate": 1.4277784472608635e-05, |
|
"loss": 0.0126, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 16.776859504132233, |
|
"grad_norm": 0.16841760277748108, |
|
"learning_rate": 1.3580634368708366e-05, |
|
"loss": 0.0151, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 16.859504132231404, |
|
"grad_norm": 0.17957067489624023, |
|
"learning_rate": 1.2899691808635283e-05, |
|
"loss": 0.015, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 16.94214876033058, |
|
"grad_norm": 0.17319756746292114, |
|
"learning_rate": 1.2235084499950178e-05, |
|
"loss": 0.0128, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 17.024793388429753, |
|
"grad_norm": 0.14869403839111328, |
|
"learning_rate": 1.1586937086614525e-05, |
|
"loss": 0.0133, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 17.107438016528924, |
|
"grad_norm": 0.15348997712135315, |
|
"learning_rate": 1.0955371125614223e-05, |
|
"loss": 0.0137, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 17.1900826446281, |
|
"grad_norm": 0.2330683022737503, |
|
"learning_rate": 1.0340505064162076e-05, |
|
"loss": 0.0143, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 17.272727272727273, |
|
"grad_norm": 0.17106673121452332, |
|
"learning_rate": 9.742454217483632e-06, |
|
"loss": 0.014, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 17.355371900826448, |
|
"grad_norm": 0.15164901316165924, |
|
"learning_rate": 9.161330747190321e-06, |
|
"loss": 0.0151, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 17.43801652892562, |
|
"grad_norm": 0.18551285564899445, |
|
"learning_rate": 8.59724364024409e-06, |
|
"loss": 0.011, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 17.520661157024794, |
|
"grad_norm": 0.13972045481204987, |
|
"learning_rate": 8.050298688517444e-06, |
|
"loss": 0.0104, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 17.60330578512397, |
|
"grad_norm": 0.17032235860824585, |
|
"learning_rate": 7.520598468952578e-06, |
|
"loss": 0.0136, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 17.68595041322314, |
|
"grad_norm": 0.14614145457744598, |
|
"learning_rate": 7.0082423243236995e-06, |
|
"loss": 0.0134, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 17.768595041322314, |
|
"grad_norm": 0.19540473818778992, |
|
"learning_rate": 6.513326344605608e-06, |
|
"loss": 0.013, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 17.85123966942149, |
|
"grad_norm": 0.16889305412769318, |
|
"learning_rate": 6.035943348952611e-06, |
|
"loss": 0.0128, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 17.93388429752066, |
|
"grad_norm": 0.13315258920192719, |
|
"learning_rate": 5.576182868290669e-06, |
|
"loss": 0.0102, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 18.016528925619834, |
|
"grad_norm": 0.197654590010643, |
|
"learning_rate": 5.134131128526243e-06, |
|
"loss": 0.012, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 18.09917355371901, |
|
"grad_norm": 0.16169218719005585, |
|
"learning_rate": 4.709871034375146e-06, |
|
"loss": 0.0103, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 18.181818181818183, |
|
"grad_norm": 0.1659018099308014, |
|
"learning_rate": 4.303482153814064e-06, |
|
"loss": 0.0125, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 18.264462809917354, |
|
"grad_norm": 0.3515506982803345, |
|
"learning_rate": 3.915040703157991e-06, |
|
"loss": 0.0141, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 18.34710743801653, |
|
"grad_norm": 0.1042797714471817, |
|
"learning_rate": 3.5446195327662133e-06, |
|
"loss": 0.0128, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 18.429752066115704, |
|
"grad_norm": 0.19790691137313843, |
|
"learning_rate": 3.1922881133795825e-06, |
|
"loss": 0.015, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 18.512396694214875, |
|
"grad_norm": 0.21495257318019867, |
|
"learning_rate": 2.8581125230915605e-06, |
|
"loss": 0.011, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 18.59504132231405, |
|
"grad_norm": 0.35978537797927856, |
|
"learning_rate": 2.5421554349556733e-06, |
|
"loss": 0.0119, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 18.677685950413224, |
|
"grad_norm": 0.1598854809999466, |
|
"learning_rate": 2.2444761052313856e-06, |
|
"loss": 0.0136, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 18.760330578512395, |
|
"grad_norm": 0.1521085500717163, |
|
"learning_rate": 1.965130362270917e-06, |
|
"loss": 0.0115, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 18.84297520661157, |
|
"grad_norm": 0.185346782207489, |
|
"learning_rate": 1.704170596048893e-06, |
|
"loss": 0.0134, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 18.925619834710744, |
|
"grad_norm": 0.23003143072128296, |
|
"learning_rate": 1.4616457483368263e-06, |
|
"loss": 0.0102, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 19.00826446280992, |
|
"grad_norm": 0.23142951726913452, |
|
"learning_rate": 1.2376013035243606e-06, |
|
"loss": 0.0116, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 19.09090909090909, |
|
"grad_norm": 0.12257048487663269, |
|
"learning_rate": 1.0320792800888602e-06, |
|
"loss": 0.0132, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 19.173553719008265, |
|
"grad_norm": 0.11083350330591202, |
|
"learning_rate": 8.451182227150467e-07, |
|
"loss": 0.0128, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 19.25619834710744, |
|
"grad_norm": 0.17381551861763, |
|
"learning_rate": 6.767531950661377e-07, |
|
"loss": 0.0137, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 19.33884297520661, |
|
"grad_norm": 0.20176014304161072, |
|
"learning_rate": 5.270157732077951e-07, |
|
"loss": 0.0151, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 19.421487603305785, |
|
"grad_norm": 0.17078755795955658, |
|
"learning_rate": 3.959340396862188e-07, |
|
"loss": 0.0125, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 19.50413223140496, |
|
"grad_norm": 0.23273628950119019, |
|
"learning_rate": 2.8353257826136957e-07, |
|
"loss": 0.0148, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 19.586776859504134, |
|
"grad_norm": 0.11116746068000793, |
|
"learning_rate": 1.8983246929642485e-07, |
|
"loss": 0.0119, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 19.669421487603305, |
|
"grad_norm": 0.16333059966564178, |
|
"learning_rate": 1.1485128580422944e-07, |
|
"loss": 0.0108, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 19.75206611570248, |
|
"grad_norm": 0.12590940296649933, |
|
"learning_rate": 5.860309015156551e-08, |
|
"loss": 0.0109, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 19.834710743801654, |
|
"grad_norm": 0.12979190051555634, |
|
"learning_rate": 2.1098431421862076e-08, |
|
"loss": 0.0131, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 19.917355371900825, |
|
"grad_norm": 0.18272912502288818, |
|
"learning_rate": 2.3443434366887673e-09, |
|
"loss": 0.0123, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 19.958677685950413, |
|
"step": 2415, |
|
"total_flos": 3.496526197912458e+17, |
|
"train_loss": 0.034255825163599866, |
|
"train_runtime": 2744.0812, |
|
"train_samples_per_second": 56.325, |
|
"train_steps_per_second": 0.88 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2415, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 20, |
|
"save_steps": 10000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 3.496526197912458e+17, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |