{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "eval_steps": 500,
  "global_step": 1480,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.016891891891891893, "grad_norm": 6.5734802912605454, "learning_rate": 3.3783783783783788e-06, "loss": 1.3273, "step": 5 },
    { "epoch": 0.033783783783783786, "grad_norm": 6.1632920104999585, "learning_rate": 6.7567567567567575e-06, "loss": 1.184, "step": 10 },
    { "epoch": 0.05067567567567568, "grad_norm": 3.9191468793106843, "learning_rate": 1.0135135135135136e-05, "loss": 1.0141, "step": 15 },
    { "epoch": 0.06756756756756757, "grad_norm": 6.836338097725109, "learning_rate": 1.3513513513513515e-05, "loss": 0.9461, "step": 20 },
    { "epoch": 0.08445945945945946, "grad_norm": 6.14454491803722, "learning_rate": 1.6891891891891892e-05, "loss": 0.9414, "step": 25 },
    { "epoch": 0.10135135135135136, "grad_norm": 3.011838488661273, "learning_rate": 2.0270270270270273e-05, "loss": 0.9043, "step": 30 },
    { "epoch": 0.11824324324324324, "grad_norm": 3.080629137119366, "learning_rate": 2.364864864864865e-05, "loss": 0.9097, "step": 35 },
    { "epoch": 0.13513513513513514, "grad_norm": 2.8246920844764523, "learning_rate": 2.702702702702703e-05, "loss": 0.9112, "step": 40 },
    { "epoch": 0.15202702702702703, "grad_norm": 2.8003645624320987, "learning_rate": 3.0405405405405407e-05, "loss": 0.9513, "step": 45 },
    { "epoch": 0.16891891891891891, "grad_norm": 2.6192832071723293, "learning_rate": 3.3783783783783784e-05, "loss": 0.9393, "step": 50 },
    { "epoch": 0.1858108108108108, "grad_norm": 2.6347662580290607, "learning_rate": 3.7162162162162165e-05, "loss": 0.9217, "step": 55 },
    { "epoch": 0.20270270270270271, "grad_norm": 2.6944921391744545, "learning_rate": 4.0540540540540545e-05, "loss": 0.9496, "step": 60 },
    { "epoch": 0.2195945945945946, "grad_norm": 2.423675002168339, "learning_rate": 4.391891891891892e-05, "loss": 0.9423, "step": 65 },
    { "epoch": 0.23648648648648649, "grad_norm": 2.6803364488285326, "learning_rate": 4.72972972972973e-05, "loss": 0.9677, "step": 70 },
    { "epoch": 0.2533783783783784, "grad_norm": 2.2781274624283685, "learning_rate": 4.999994383297182e-05, "loss": 0.966, "step": 75 },
    { "epoch": 0.2702702702702703, "grad_norm": 2.4018446311363544, "learning_rate": 4.9997978016429605e-05, "loss": 0.9715, "step": 80 },
    { "epoch": 0.28716216216216217, "grad_norm": 2.3728527784166036, "learning_rate": 4.9993204128893056e-05, "loss": 0.977, "step": 85 },
    { "epoch": 0.30405405405405406, "grad_norm": 2.047106826030879, "learning_rate": 4.9985622766211935e-05, "loss": 0.9353, "step": 90 },
    { "epoch": 0.32094594594594594, "grad_norm": 2.1267608330495125, "learning_rate": 4.997523487464928e-05, "loss": 0.9432, "step": 95 },
    { "epoch": 0.33783783783783783, "grad_norm": 2.1225078532563537, "learning_rate": 4.996204175076325e-05, "loss": 0.9651, "step": 100 },
    { "epoch": 0.3547297297297297, "grad_norm": 2.0337324708472906, "learning_rate": 4.99460450412453e-05, "loss": 0.9648, "step": 105 },
    { "epoch": 0.3716216216216216, "grad_norm": 1.916581074879566, "learning_rate": 4.99272467427147e-05, "loss": 0.9675, "step": 110 },
    { "epoch": 0.3885135135135135, "grad_norm": 2.121919880463027, "learning_rate": 4.990564920146926e-05, "loss": 0.9856, "step": 115 },
    { "epoch": 0.40540540540540543, "grad_norm": 2.214799458005234, "learning_rate": 4.9881255113192526e-05, "loss": 0.929, "step": 120 },
    { "epoch": 0.4222972972972973, "grad_norm": 2.163453291125734, "learning_rate": 4.985406752261731e-05, "loss": 0.9473, "step": 125 },
    { "epoch": 0.4391891891891892, "grad_norm": 1.9198145369029436, "learning_rate": 4.982408982314565e-05, "loss": 0.9678, "step": 130 },
    { "epoch": 0.4560810810810811, "grad_norm": 2.0248877076532077, "learning_rate": 4.9791325756425264e-05, "loss": 0.9328, "step": 135 },
    { "epoch": 0.47297297297297297, "grad_norm": 1.807231483694689, "learning_rate": 4.975577941188258e-05, "loss": 0.9373, "step": 140 },
    { "epoch": 0.48986486486486486, "grad_norm": 1.8706434071195257, "learning_rate": 4.971745522621225e-05, "loss": 0.9626, "step": 145 },
    { "epoch": 0.5067567567567568, "grad_norm": 1.7945270289752226, "learning_rate": 4.967635798282344e-05, "loss": 0.9705, "step": 150 },
    { "epoch": 0.5236486486486487, "grad_norm": 1.8616405046783886, "learning_rate": 4.963249281124278e-05, "loss": 0.9072, "step": 155 },
    { "epoch": 0.5405405405405406, "grad_norm": 1.666683134269227, "learning_rate": 4.958586518647407e-05, "loss": 0.898, "step": 160 },
    { "epoch": 0.5574324324324325, "grad_norm": 1.594107803207033, "learning_rate": 4.953648092831505e-05, "loss": 0.9251, "step": 165 },
    { "epoch": 0.5743243243243243, "grad_norm": 1.8741745620159964, "learning_rate": 4.9484346200630855e-05, "loss": 0.9687, "step": 170 },
    { "epoch": 0.5912162162162162, "grad_norm": 1.905205634059141, "learning_rate": 4.9429467510584794e-05, "loss": 0.8999, "step": 175 },
    { "epoch": 0.6081081081081081, "grad_norm": 1.9962261429841193, "learning_rate": 4.937185170782607e-05, "loss": 0.9368, "step": 180 },
    { "epoch": 0.625, "grad_norm": 2.1664869693090165, "learning_rate": 4.931150598363494e-05, "loss": 0.9061, "step": 185 },
    { "epoch": 0.6418918918918919, "grad_norm": 1.6870468109861425, "learning_rate": 4.9248437870025035e-05, "loss": 0.8989, "step": 190 },
    { "epoch": 0.6587837837837838, "grad_norm": 1.6966567361912948, "learning_rate": 4.9182655238803365e-05, "loss": 0.8989, "step": 195 },
    { "epoch": 0.6756756756756757, "grad_norm": 1.692847679116018, "learning_rate": 4.911416630058772e-05, "loss": 0.8889, "step": 200 },
    { "epoch": 0.6925675675675675, "grad_norm": 1.666285584632242, "learning_rate": 4.9042979603781884e-05, "loss": 0.9038, "step": 205 },
    { "epoch": 0.7094594594594594, "grad_norm": 1.7022768095416743, "learning_rate": 4.896910403350873e-05, "loss": 0.9247, "step": 210 },
    { "epoch": 0.7263513513513513, "grad_norm": 1.6583467639807945, "learning_rate": 4.8892548810501146e-05, "loss": 0.9042, "step": 215 },
    { "epoch": 0.7432432432432432, "grad_norm": 1.5684890837209118, "learning_rate": 4.88133234899512e-05, "loss": 0.8875, "step": 220 },
    { "epoch": 0.7601351351351351, "grad_norm": 1.7067630214310312, "learning_rate": 4.873143796031752e-05, "loss": 0.8797, "step": 225 },
    { "epoch": 0.777027027027027, "grad_norm": 1.5501274222949621, "learning_rate": 4.864690244209105e-05, "loss": 0.8903, "step": 230 },
    { "epoch": 0.793918918918919, "grad_norm": 1.6739236540233466, "learning_rate": 4.855972748651939e-05, "loss": 0.9036, "step": 235 },
    { "epoch": 0.8108108108108109, "grad_norm": 1.5251028899529802, "learning_rate": 4.8469923974289874e-05, "loss": 0.876, "step": 240 },
    { "epoch": 0.8277027027027027, "grad_norm": 1.6416347283581953, "learning_rate": 4.837750311417146e-05, "loss": 0.8777, "step": 245 },
    { "epoch": 0.8445945945945946, "grad_norm": 1.7148770313619208, "learning_rate": 4.828247644161577e-05, "loss": 0.9305, "step": 250 },
    { "epoch": 0.8614864864864865, "grad_norm": 1.6917833006591425, "learning_rate": 4.8184855817317226e-05, "loss": 0.8902, "step": 255 },
    { "epoch": 0.8783783783783784, "grad_norm": 1.4551269904823192, "learning_rate": 4.808465342573274e-05, "loss": 0.866, "step": 260 },
    { "epoch": 0.8952702702702703, "grad_norm": 1.6585601416449522, "learning_rate": 4.7981881773560886e-05, "loss": 0.896, "step": 265 },
    { "epoch": 0.9121621621621622, "grad_norm": 1.5413455024296299, "learning_rate": 4.787655368818087e-05, "loss": 0.8684, "step": 270 },
    { "epoch": 0.9290540540540541, "grad_norm": 1.4770164865772377, "learning_rate": 4.77686823160515e-05, "loss": 0.8704, "step": 275 },
    { "epoch": 0.9459459459459459, "grad_norm": 1.642857550796514, "learning_rate": 4.765828112107034e-05, "loss": 0.8749, "step": 280 },
    { "epoch": 0.9628378378378378, "grad_norm": 2.028272824380857, "learning_rate": 4.75453638828932e-05, "loss": 0.8654, "step": 285 },
    { "epoch": 0.9797297297297297, "grad_norm": 1.4679639397479571, "learning_rate": 4.742994469521421e-05, "loss": 0.8601, "step": 290 },
    { "epoch": 0.9966216216216216, "grad_norm": 1.4327374675646831, "learning_rate": 4.7312037964006806e-05, "loss": 0.8564, "step": 295 },
    { "epoch": 1.0135135135135136, "grad_norm": 1.6103445933566929, "learning_rate": 4.719165840572557e-05, "loss": 0.6356, "step": 300 },
    { "epoch": 1.0304054054054055, "grad_norm": 1.2609614774524984, "learning_rate": 4.7068821045469464e-05, "loss": 0.5569, "step": 305 },
    { "epoch": 1.0472972972972974, "grad_norm": 1.6175496296524783, "learning_rate": 4.694354121510644e-05, "loss": 0.5491, "step": 310 },
    { "epoch": 1.0641891891891893, "grad_norm": 1.426861182551709, "learning_rate": 4.681583455135985e-05, "loss": 0.5354, "step": 315 },
    { "epoch": 1.0810810810810811, "grad_norm": 1.3646672710235592, "learning_rate": 4.668571699385668e-05, "loss": 0.5427, "step": 320 },
    { "epoch": 1.097972972972973, "grad_norm": 1.5346272239009828, "learning_rate": 4.655320478313817e-05, "loss": 0.5359, "step": 325 },
    { "epoch": 1.114864864864865, "grad_norm": 1.3759835475947566, "learning_rate": 4.641831445863265e-05, "loss": 0.5638, "step": 330 },
    { "epoch": 1.1317567567567568, "grad_norm": 1.4909239756270425, "learning_rate": 4.628106285659124e-05, "loss": 0.5741, "step": 335 },
    { "epoch": 1.1486486486486487, "grad_norm": 1.7461008672361147, "learning_rate": 4.614146710798645e-05, "loss": 0.5461, "step": 340 },
    { "epoch": 1.1655405405405406, "grad_norm": 1.5678203614090302, "learning_rate": 4.599954463637394e-05, "loss": 0.5898, "step": 345 },
    { "epoch": 1.1824324324324325, "grad_norm": 1.3911715983555075, "learning_rate": 4.585531315571788e-05, "loss": 0.5478, "step": 350 },
    { "epoch": 1.1993243243243243, "grad_norm": 1.521721643927216, "learning_rate": 4.570879066817991e-05, "loss": 0.5528, "step": 355 },
    { "epoch": 1.2162162162162162, "grad_norm": 1.4667462390466979, "learning_rate": 4.555999546187229e-05, "loss": 0.5754, "step": 360 },
    { "epoch": 1.2331081081081081, "grad_norm": 1.5550072973306515, "learning_rate": 4.5408946108575215e-05, "loss": 0.5708, "step": 365 },
    { "epoch": 1.25, "grad_norm": 1.3431454538729617, "learning_rate": 4.5255661461418854e-05, "loss": 0.5577, "step": 370 },
    { "epoch": 1.2668918918918919, "grad_norm": 1.4814401713234768, "learning_rate": 4.510016065253016e-05, "loss": 0.5643, "step": 375 },
    { "epoch": 1.2837837837837838, "grad_norm": 1.3784794077594908, "learning_rate": 4.4942463090644896e-05, "loss": 0.5925, "step": 380 },
    { "epoch": 1.3006756756756757, "grad_norm": 1.3720445672478283, "learning_rate": 4.478258845868522e-05, "loss": 0.5807, "step": 385 },
    { "epoch": 1.3175675675675675, "grad_norm": 1.4533572289367873, "learning_rate": 4.462055671130289e-05, "loss": 0.5796, "step": 390 },
    { "epoch": 1.3344594594594594, "grad_norm": 1.5146363381910304, "learning_rate": 4.445638807238865e-05, "loss": 0.5804, "step": 395 },
    { "epoch": 1.3513513513513513, "grad_norm": 1.5458264427252129, "learning_rate": 4.4290103032548094e-05, "loss": 0.5722, "step": 400 },
    { "epoch": 1.3682432432432432, "grad_norm": 1.3173722679798987, "learning_rate": 4.412172234654399e-05, "loss": 0.5531, "step": 405 },
    { "epoch": 1.385135135135135, "grad_norm": 1.4883033189800094, "learning_rate": 4.395126703070589e-05, "loss": 0.5577, "step": 410 },
    { "epoch": 1.402027027027027, "grad_norm": 1.3909336605285059, "learning_rate": 4.3778758360306986e-05, "loss": 0.578, "step": 415 },
    { "epoch": 1.4189189189189189, "grad_norm": 1.3005774948488031, "learning_rate": 4.360421786690862e-05, "loss": 0.5583, "step": 420 },
    { "epoch": 1.4358108108108107, "grad_norm": 1.4342849633761854, "learning_rate": 4.3427667335672815e-05, "loss": 0.5842, "step": 425 },
    { "epoch": 1.4527027027027026, "grad_norm": 1.4154466456437151, "learning_rate": 4.324912880264326e-05, "loss": 0.5491, "step": 430 },
    { "epoch": 1.4695945945945945, "grad_norm": 1.4449023037096216, "learning_rate": 4.30686245519948e-05, "loss": 0.5459, "step": 435 },
    { "epoch": 1.4864864864864864, "grad_norm": 1.2746647387428396, "learning_rate": 4.288617711325207e-05, "loss": 0.5819, "step": 440 },
    { "epoch": 1.5033783783783785, "grad_norm": 1.447467776474505, "learning_rate": 4.2701809258477575e-05, "loss": 0.5784, "step": 445 },
    { "epoch": 1.5202702702702702, "grad_norm": 1.2391202211909191, "learning_rate": 4.251554399942928e-05, "loss": 0.5701, "step": 450 },
    { "epoch": 1.5371621621621623, "grad_norm": 1.2268682468986518, "learning_rate": 4.2327404584688495e-05, "loss": 0.5995, "step": 455 },
    { "epoch": 1.554054054054054, "grad_norm": 1.3918773327038636, "learning_rate": 4.21374144967581e-05, "loss": 0.5644, "step": 460 },
    { "epoch": 1.570945945945946, "grad_norm": 1.4547926883844007, "learning_rate": 4.194559744913157e-05, "loss": 0.5642, "step": 465 },
    { "epoch": 1.5878378378378377, "grad_norm": 1.3596176232205965, "learning_rate": 4.1751977383333224e-05, "loss": 0.5591, "step": 470 },
    { "epoch": 1.6047297297297298, "grad_norm": 1.3130436197901627, "learning_rate": 4.1556578465929955e-05, "loss": 0.571, "step": 475 },
    { "epoch": 1.6216216216216215, "grad_norm": 1.402701522056968, "learning_rate": 4.1359425085514906e-05, "loss": 0.5927, "step": 480 },
    { "epoch": 1.6385135135135136, "grad_norm": 1.2781385628050617, "learning_rate": 4.1160541849663404e-05, "loss": 0.5745, "step": 485 },
    { "epoch": 1.6554054054054053, "grad_norm": 1.3121044373462736, "learning_rate": 4.095995358186162e-05, "loss": 0.5734, "step": 490 },
    { "epoch": 1.6722972972972974, "grad_norm": 1.3358239819906057, "learning_rate": 4.0757685318408186e-05, "loss": 0.5553, "step": 495 },
    { "epoch": 1.689189189189189, "grad_norm": 1.3109247815754774, "learning_rate": 4.055376230528936e-05, "loss": 0.5621, "step": 500 },
    { "epoch": 1.7060810810810811, "grad_norm": 1.3687138517149326, "learning_rate": 4.034820999502794e-05, "loss": 0.571, "step": 505 },
    { "epoch": 1.722972972972973, "grad_norm": 1.2988387849104504, "learning_rate": 4.0141054043506406e-05, "loss": 0.5741, "step": 510 },
    { "epoch": 1.739864864864865, "grad_norm": 1.3411202779787594, "learning_rate": 3.993232030676473e-05, "loss": 0.5685, "step": 515 },
    { "epoch": 1.7567567567567568, "grad_norm": 1.2949335901614112, "learning_rate": 3.972203483777315e-05, "loss": 0.558, "step": 520 },
    { "epoch": 1.7736486486486487, "grad_norm": 1.3721749127594483, "learning_rate": 3.95102238831804e-05, "loss": 0.5915, "step": 525 },
    { "epoch": 1.7905405405405406, "grad_norm": 1.350082387281707, "learning_rate": 3.929691388003772e-05, "loss": 0.5685, "step": 530 },
    { "epoch": 1.8074324324324325, "grad_norm": 1.3245276027947501, "learning_rate": 3.908213145249916e-05, "loss": 0.5691, "step": 535 },
    { "epoch": 1.8243243243243243, "grad_norm": 1.4137310799251641, "learning_rate": 3.886590340849852e-05, "loss": 0.566, "step": 540 },
    { "epoch": 1.8412162162162162, "grad_norm": 1.356555489682696, "learning_rate": 3.864825673640326e-05, "loss": 0.551, "step": 545 },
    { "epoch": 1.8581081081081081, "grad_norm": 1.2815437787372281, "learning_rate": 3.842921860164607e-05, "loss": 0.5655, "step": 550 },
    { "epoch": 1.875, "grad_norm": 1.3576111263757158, "learning_rate": 3.8208816343334156e-05, "loss": 0.5497, "step": 555 },
    { "epoch": 1.8918918918918919, "grad_norm": 1.2750346506851613, "learning_rate": 3.798707747083694e-05, "loss": 0.5746, "step": 560 },
    { "epoch": 1.9087837837837838, "grad_norm": 1.317301287962323, "learning_rate": 3.776402966035251e-05, "loss": 0.584, "step": 565 },
    { "epoch": 1.9256756756756757, "grad_norm": 1.2204583503286293, "learning_rate": 3.753970075145322e-05, "loss": 0.5518, "step": 570 },
    { "epoch": 1.9425675675675675, "grad_norm": 1.2700469074124594, "learning_rate": 3.731411874361094e-05, "loss": 0.5466, "step": 575 },
    { "epoch": 1.9594594594594594, "grad_norm": 1.2657661298703708, "learning_rate": 3.7087311792702265e-05, "loss": 0.5659, "step": 580 },
    { "epoch": 1.9763513513513513, "grad_norm": 1.2536149686511529, "learning_rate": 3.685930820749433e-05, "loss": 0.5555, "step": 585 },
    { "epoch": 1.9932432432432432, "grad_norm": 1.2809671695628992, "learning_rate": 3.663013644611139e-05, "loss": 0.5563, "step": 590 },
    { "epoch": 2.010135135135135, "grad_norm": 1.103796522043604, "learning_rate": 3.639982511248289e-05, "loss": 0.3723, "step": 595 },
    { "epoch": 2.027027027027027, "grad_norm": 1.2569696258546523, "learning_rate": 3.616840295277328e-05, "loss": 0.2453, "step": 600 },
    { "epoch": 2.043918918918919, "grad_norm": 1.202522926688232, "learning_rate": 3.593589885179405e-05, "loss": 0.2582, "step": 605 },
    { "epoch": 2.060810810810811, "grad_norm": 1.141634620637362, "learning_rate": 3.5702341829398525e-05, "loss": 0.25, "step": 610 },
    { "epoch": 2.0777027027027026, "grad_norm": 1.2434462048859511, "learning_rate": 3.5467761036859736e-05, "loss": 0.2524, "step": 615 },
    { "epoch": 2.0945945945945947, "grad_norm": 1.2503774124889013, "learning_rate": 3.523218575323198e-05, "loss": 0.2426, "step": 620 },
    { "epoch": 2.1114864864864864, "grad_norm": 1.0984336540308752, "learning_rate": 3.499564538169629e-05, "loss": 0.2442, "step": 625 },
    { "epoch": 2.1283783783783785, "grad_norm": 1.3274920050931374, "learning_rate": 3.475816944589058e-05, "loss": 0.2426, "step": 630 },
    { "epoch": 2.14527027027027, "grad_norm": 1.2086016497404501, "learning_rate": 3.451978758622458e-05, "loss": 0.2549, "step": 635 },
    { "epoch": 2.1621621621621623, "grad_norm": 1.1812293557905968, "learning_rate": 3.4280529556180404e-05, "loss": 0.2383, "step": 640 },
    { "epoch": 2.179054054054054, "grad_norm": 1.4704829661444183, "learning_rate": 3.4040425218598755e-05, "loss": 0.2441, "step": 645 },
    { "epoch": 2.195945945945946, "grad_norm": 1.2763011019642492, "learning_rate": 3.379950454195172e-05, "loss": 0.252, "step": 650 },
    { "epoch": 2.2128378378378377, "grad_norm": 1.2711259414467864, "learning_rate": 3.355779759660223e-05, "loss": 0.2482, "step": 655 },
    { "epoch": 2.22972972972973, "grad_norm": 1.2196154702915583, "learning_rate": 3.331533455105084e-05, "loss": 0.2422, "step": 660 },
    { "epoch": 2.2466216216216215, "grad_norm": 1.0928327721884867, "learning_rate": 3.307214566817027e-05, "loss": 0.2493, "step": 665 },
    { "epoch": 2.2635135135135136, "grad_norm": 1.1577331879228163, "learning_rate": 3.2828261301428206e-05, "loss": 0.2547, "step": 670 },
    { "epoch": 2.2804054054054053, "grad_norm": 1.142585348862718, "learning_rate": 3.2583711891098665e-05, "loss": 0.2545, "step": 675 },
    { "epoch": 2.2972972972972974, "grad_norm": 1.3931724258065883, "learning_rate": 3.23385279604627e-05, "loss": 0.2555, "step": 680 },
    { "epoch": 2.314189189189189, "grad_norm": 1.12997531420638, "learning_rate": 3.209274011199861e-05, "loss": 0.2503, "step": 685 },
    { "epoch": 2.331081081081081, "grad_norm": 1.1911209479824547, "learning_rate": 3.18463790235623e-05, "loss": 0.252, "step": 690 },
    { "epoch": 2.347972972972973, "grad_norm": 1.2118767972123496, "learning_rate": 3.159947544455828e-05, "loss": 0.2489, "step": 695 },
    { "epoch": 2.364864864864865, "grad_norm": 1.1732170281780285, "learning_rate": 3.135206019210167e-05, "loss": 0.2452, "step": 700 },
    { "epoch": 2.3817567567567566, "grad_norm": 1.171003404202182, "learning_rate": 3.110416414717181e-05, "loss": 0.2565, "step": 705 },
    { "epoch": 2.3986486486486487, "grad_norm": 1.087800299348181, "learning_rate": 3.085581825075782e-05, "loss": 0.2428, "step": 710 },
    { "epoch": 2.4155405405405403, "grad_norm": 1.1798352511221857, "learning_rate": 3.060705349999677e-05, "loss": 0.2597, "step": 715 },
    { "epoch": 2.4324324324324325, "grad_norm": 1.1164619974303382, "learning_rate": 3.0357900944304774e-05, "loss": 0.2558, "step": 720 },
    { "epoch": 2.4493243243243246, "grad_norm": 1.1783745618133863, "learning_rate": 3.0108391681501564e-05, "loss": 0.2502, "step": 725 },
    { "epoch": 2.4662162162162162, "grad_norm": 1.2059356558094934, "learning_rate": 2.9858556853929048e-05, "loss": 0.2549, "step": 730 },
    { "epoch": 2.483108108108108, "grad_norm": 1.1084513675225853, "learning_rate": 2.96084276445643e-05, "loss": 0.2623, "step": 735 },
    { "epoch": 2.5, "grad_norm": 1.1268704196768717, "learning_rate": 2.9358035273127483e-05, "loss": 0.2445, "step": 740 },
    { "epoch": 2.516891891891892, "grad_norm": 1.2073238153053072, "learning_rate": 2.910741099218514e-05, "loss": 0.2523, "step": 745 },
    { "epoch": 2.5337837837837838, "grad_norm": 1.1875133895260694, "learning_rate": 2.8856586083249487e-05, "loss": 0.2587, "step": 750 },
    { "epoch": 2.5506756756756754, "grad_norm": 1.1828390695293125, "learning_rate": 2.860559185287397e-05, "loss": 0.2555, "step": 755 },
    { "epoch": 2.5675675675675675, "grad_norm": 1.1686470252895278, "learning_rate": 2.83544596287458e-05, "loss": 0.246, "step": 760 },
    { "epoch": 2.5844594594594597, "grad_norm": 1.2445669104213888, "learning_rate": 2.8103220755775776e-05, "loss": 0.2536, "step": 765 },
    { "epoch": 2.6013513513513513, "grad_norm": 1.1612663427931047, "learning_rate": 2.785190659218604e-05, "loss": 0.2511, "step": 770 },
    { "epoch": 2.618243243243243, "grad_norm": 1.169020666404516, "learning_rate": 2.760054850559603e-05, "loss": 0.2464, "step": 775 },
    { "epoch": 2.635135135135135, "grad_norm": 1.1211602172208788, "learning_rate": 2.7349177869107462e-05, "loss": 0.2425, "step": 780 },
    { "epoch": 2.652027027027027, "grad_norm": 1.1295937771546627, "learning_rate": 2.709782605738842e-05, "loss": 0.251, "step": 785 },
    { "epoch": 2.668918918918919, "grad_norm": 1.1985208404439514, "learning_rate": 2.684652444275741e-05, "loss": 0.2511, "step": 790 },
    { "epoch": 2.685810810810811, "grad_norm": 1.1712043419573621, "learning_rate": 2.6595304391267605e-05, "loss": 0.2409, "step": 795 },
    { "epoch": 2.7027027027027026, "grad_norm": 1.1600530170137202, "learning_rate": 2.634419725879193e-05, "loss": 0.2461, "step": 800 },
    { "epoch": 2.7195945945945947, "grad_norm": 1.0980080565697286, "learning_rate": 2.60932343871094e-05, "loss": 0.2351, "step": 805 },
    { "epoch": 2.7364864864864864, "grad_norm": 1.204458523946955, "learning_rate": 2.58424470999932e-05, "loss": 0.2415, "step": 810 },
    { "epoch": 2.7533783783783785, "grad_norm": 1.2137349551053236, "learning_rate": 2.5591866699301055e-05, "loss": 0.2411, "step": 815 },
    { "epoch": 2.77027027027027, "grad_norm": 1.2375233171662754, "learning_rate": 2.534152446106825e-05, "loss": 0.2429, "step": 820 },
    { "epoch": 2.7871621621621623, "grad_norm": 1.1646244068445126, "learning_rate": 2.5091451631604033e-05, "loss": 0.2359, "step": 825 },
    { "epoch": 2.804054054054054, "grad_norm": 1.1619752616590404, "learning_rate": 2.4841679423591523e-05, "loss": 0.2368, "step": 830 },
    { "epoch": 2.820945945945946, "grad_norm": 1.1047288053357993, "learning_rate": 2.4592239012191977e-05, "loss": 0.2391, "step": 835 },
    { "epoch": 2.8378378378378377, "grad_norm": 1.074588438071139, "learning_rate": 2.4343161531153647e-05, "loss": 0.234, "step": 840 },
    { "epoch": 2.85472972972973, "grad_norm": 1.1971487515181058, "learning_rate": 2.4094478068925884e-05, "loss": 0.2459, "step": 845 },
    { "epoch": 2.8716216216216215, "grad_norm": 1.18752516936673, "learning_rate": 2.3846219664778824e-05, "loss": 0.2337, "step": 850 },
    { "epoch": 2.8885135135135136, "grad_norm": 1.0648986336743749, "learning_rate": 2.3598417304929226e-05, "loss": 0.2385, "step": 855 },
    { "epoch": 2.9054054054054053, "grad_norm": 1.2273436401598625, "learning_rate": 2.3351101918672985e-05, "loss": 0.2307, "step": 860 },
    { "epoch": 2.9222972972972974, "grad_norm": 1.1562044904763753, "learning_rate": 2.3104304374524704e-05, "loss": 0.2313, "step": 865 },
    { "epoch": 2.939189189189189, "grad_norm": 1.1200371062184964, "learning_rate": 2.2858055476364822e-05, "loss": 0.2382, "step": 870 },
    { "epoch": 2.956081081081081, "grad_norm": 1.0863337573648397, "learning_rate": 2.2612385959594877e-05, "loss": 0.2357, "step": 875 },
    { "epoch": 2.972972972972973, "grad_norm": 1.1238376465153148, "learning_rate": 2.2367326487301317e-05, "loss": 0.2366, "step": 880 },
    { "epoch": 2.989864864864865, "grad_norm": 1.1679762876800548, "learning_rate": 2.2122907646428214e-05, "loss": 0.233, "step": 885 },
    { "epoch": 3.0067567567567566, "grad_norm": 0.7617860489835798, "learning_rate": 2.1879159943959686e-05, "loss": 0.1782, "step": 890 },
    { "epoch": 3.0236486486486487, "grad_norm": 0.7980232728533272, "learning_rate": 2.1636113803112097e-05, "loss": 0.0889, "step": 895 },
    { "epoch": 3.0405405405405403, "grad_norm": 1.0371057007455198, "learning_rate": 2.139379955953686e-05, "loss": 0.0855, "step": 900 },
    { "epoch": 3.0574324324324325, "grad_norm": 0.922694869510506, "learning_rate": 2.1152247457534065e-05, "loss": 0.082, "step": 905 },
    { "epoch": 3.074324324324324, "grad_norm": 0.8665469624615858, "learning_rate": 2.0911487646277623e-05, "loss": 0.0842, "step": 910 },
    { "epoch": 3.0912162162162162, "grad_norm": 0.8466103971895397, "learning_rate": 2.067155017605212e-05, "loss": 0.0818, "step": 915 },
    { "epoch": 3.108108108108108, "grad_norm": 0.8200266011697321, "learning_rate": 2.0432464994502203e-05, "loss": 0.0839, "step": 920 },
    { "epoch": 3.125, "grad_norm": 0.805011414682386, "learning_rate": 2.0194261942894628e-05, "loss": 0.0798, "step": 925 },
    { "epoch": 3.141891891891892, "grad_norm": 0.9572346016824955, "learning_rate": 1.995697075239365e-05, "loss": 0.0856, "step": 930 },
    { "epoch": 3.1587837837837838, "grad_norm": 0.8204526811751627, "learning_rate": 1.972062104035017e-05, "loss": 0.079, "step": 935 },
    { "epoch": 3.175675675675676, "grad_norm": 0.8185257809669213, "learning_rate": 1.9485242306605028e-05, "loss": 0.0865, "step": 940 },
    { "epoch": 3.1925675675675675, "grad_norm": 0.8796508468976082, "learning_rate": 1.9250863929807027e-05, "loss": 0.0786, "step": 945 },
    { "epoch": 3.2094594594594597, "grad_norm": 0.8756829385022001, "learning_rate": 1.9017515163746058e-05, "loss": 0.0836, "step": 950 },
    { "epoch": 3.2263513513513513, "grad_norm": 0.8157354792396803, "learning_rate": 1.878522513370177e-05, "loss": 0.0852, "step": 955 },
    { "epoch": 3.2432432432432434, "grad_norm": 0.8590600023617616, "learning_rate": 1.855402283280836e-05, "loss": 0.0835, "step": 960 },
    { "epoch": 3.260135135135135, "grad_norm": 0.8643752542390469, "learning_rate": 1.8323937118435786e-05, "loss": 0.0814, "step": 965 },
    { "epoch": 3.277027027027027, "grad_norm": 0.9225152379758252, "learning_rate": 1.8094996708587958e-05, "loss": 0.0861, "step": 970 },
    { "epoch": 3.293918918918919, "grad_norm": 0.9044834326870717, "learning_rate": 1.7867230178318334e-05, "loss": 0.0819, "step": 975 },
    { "epoch": 3.310810810810811, "grad_norm": 0.8920506863920826, "learning_rate": 1.7640665956163306e-05, "loss": 0.0809, "step": 980 },
    { "epoch": 3.3277027027027026, "grad_norm": 0.9035029361545068, "learning_rate": 1.7415332320593964e-05, "loss": 0.0782, "step": 985 },
    { "epoch": 3.3445945945945947, "grad_norm": 0.8458923715737863, "learning_rate": 1.719125739648648e-05, "loss": 0.0795, "step": 990 },
    { "epoch": 3.3614864864864864, "grad_norm": 0.8283053816212952, "learning_rate": 1.6968469151611766e-05, "loss": 0.0766, "step": 995 },
    { "epoch": 3.3783783783783785, "grad_norm": 0.8124298394489518, "learning_rate": 1.6746995393144668e-05, "loss": 0.0795, "step": 1000 },
    { "epoch": 3.39527027027027, "grad_norm": 0.84974780220989, "learning_rate": 1.6526863764193228e-05, "loss": 0.0816, "step": 1005 },
    { "epoch": 3.4121621621621623, "grad_norm": 0.9040056390996183, "learning_rate": 1.6308101740348433e-05, "loss": 0.0862, "step": 1010 },
    { "epoch": 3.429054054054054, "grad_norm": 0.8503065695427434, "learning_rate": 1.6090736626254894e-05, "loss": 0.0821, "step": 1015 },
    { "epoch": 3.445945945945946, "grad_norm": 0.922976948565284, "learning_rate": 1.5874795552202773e-05, "loss": 0.0767, "step": 1020 },
    { "epoch": 3.4628378378378377, "grad_norm": 0.8359859623361058, "learning_rate": 1.5660305470741603e-05, "loss": 0.0822, "step": 1025 },
    { "epoch": 3.47972972972973, "grad_norm": 0.7993044555598261, "learning_rate": 1.5447293153316163e-05, "loss": 0.0725, "step": 1030 },
    { "epoch": 3.4966216216216215, "grad_norm": 0.8837194556202582, "learning_rate": 1.523578518692505e-05, "loss": 0.0803, "step": 1035 },
    { "epoch": 3.5135135135135136, "grad_norm": 0.8671077384687282, "learning_rate": 1.5025807970802252e-05, "loss": 0.0759, "step": 1040 },
    { "epoch": 3.5304054054054053, "grad_norm": 0.851094402030443, "learning_rate": 1.481738771312209e-05, "loss": 0.079, "step": 1045 },
    { "epoch": 3.5472972972972974, "grad_norm": 0.8361299277027681, "learning_rate": 1.4610550427728103e-05, "loss": 0.076, "step": 1050 },
    { "epoch": 3.564189189189189, "grad_norm": 0.8472817077547173, "learning_rate": 1.4405321930886161e-05, "loss": 0.0762, "step": 1055 },
    { "epoch": 3.581081081081081, "grad_norm": 0.8496742173511822, "learning_rate": 1.4201727838062181e-05, "loss": 0.0777, "step": 1060 },
    { "epoch": 3.597972972972973, "grad_norm": 0.9073909261434937, "learning_rate": 1.3999793560724966e-05, "loss": 0.0778, "step": 1065 },
    { "epoch": 3.614864864864865, "grad_norm": 0.8148037450824118, "learning_rate": 1.3799544303174514e-05, "loss": 0.0753, "step": 1070 },
    { "epoch": 3.631756756756757, "grad_norm": 0.792903387483005, "learning_rate": 1.3601005059396104e-05, "loss": 0.0743, "step": 1075 },
    { "epoch": 3.6486486486486487, "grad_norm": 0.8138776105456261, "learning_rate": 1.3404200609940754e-05, "loss": 0.0771, "step": 1080 },
    { "epoch": 3.6655405405405403, "grad_norm": 0.8412665781860845, "learning_rate": 1.3209155518832203e-05, "loss": 0.0748, "step": 1085 },
    { "epoch": 3.6824324324324325, "grad_norm": 0.8443573738089697, "learning_rate": 1.3015894130500977e-05, "loss": 0.0729, "step": 1090 },
    { "epoch": 3.6993243243243246, "grad_norm": 0.8610810221368033, "learning_rate": 1.2824440566745865e-05, "loss": 0.0744, "step": 1095 },
    { "epoch": 3.7162162162162162, "grad_norm": 0.7968023292862391, "learning_rate": 1.2634818723723174e-05, "loss": 0.0752, "step": 1100 },
    { "epoch": 3.733108108108108, "grad_norm": 0.7623771883673489, "learning_rate": 1.2447052268964122e-05, "loss": 0.0698, "step": 1105 },
    { "epoch": 3.75, "grad_norm": 0.8911604401532162, "learning_rate": 1.2261164638420832e-05, "loss": 0.0728, "step": 1110 },
    { "epoch": 3.766891891891892, "grad_norm": 0.8291626224492225, "learning_rate": 1.2077179033541139e-05, "loss": 0.0725, "step": 1115 },
    { "epoch": 3.7837837837837838, "grad_norm": 0.7888246317862425, "learning_rate": 1.1895118418372734e-05, "loss": 0.0751, "step": 1120 },
    { "epoch": 3.8006756756756754, "grad_norm": 0.7596045011769311, "learning_rate": 1.171500551669697e-05, "loss": 0.0707, "step": 1125 },
    { "epoch": 3.8175675675675675, "grad_norm": 0.8261890861433314, "learning_rate": 1.1536862809192518e-05, "loss": 0.0699, "step": 1130 },
    { "epoch": 3.8344594594594597, "grad_norm": 0.8157898272197499, "learning_rate": 1.1360712530629513e-05, "loss": 0.0739, "step": 1135 },
    { "epoch": 3.8513513513513513, "grad_norm": 0.7953827120772039, "learning_rate": 1.1186576667094342e-05, "loss": 0.0727, "step": 1140 },
    { "epoch": 3.868243243243243, "grad_norm": 0.8106925387796762, "learning_rate": 1.1014476953245423e-05, "loss": 0.0689, "step": 1145 },
    { "epoch": 3.885135135135135, "grad_norm": 0.7915344112028949, "learning_rate": 1.0844434869600428e-05, "loss": 0.0696, "step": 1150 },
    { "epoch": 3.902027027027027, "grad_norm": 0.8231236378626898, "learning_rate": 1.0676471639855235e-05, "loss": 0.0701, "step": 1155 },
    { "epoch": 3.918918918918919, "grad_norm": 0.8574639286128795, "learning_rate": 1.0510608228234848e-05, "loss": 0.0678, "step": 1160 },
    { "epoch": 3.935810810810811, "grad_norm": 0.7703198418021956, "learning_rate": 1.0346865336876809e-05, "loss": 0.0679, "step": 1165 },
    { "epoch": 3.9527027027027026, "grad_norm": 0.8086225426905125, "learning_rate": 1.0185263403247256e-05, "loss": 0.0669, "step": 1170 },
    { "epoch": 3.9695945945945947, "grad_norm": 0.8488002772643025, "learning_rate": 1.002582259759002e-05, "loss": 0.0684, "step": 1175 },
    { "epoch": 3.9864864864864864, "grad_norm": 0.8980288101695698, "learning_rate": 9.868562820409103e-06, "loss": 0.0686, "step": 1180 },
    { "epoch": 4.003378378378378, "grad_norm": 0.3986041630321839, "learning_rate": 9.713503699984825e-06, "loss": 0.0548, "step": 1185 },
    { "epoch": 4.02027027027027, "grad_norm": 0.43726382965712174, "learning_rate": 9.560664589923895e-06, "loss": 0.0219, "step": 1190 },
    { "epoch": 4.037162162162162, "grad_norm": 0.36710468007751074, "learning_rate": 9.410064566743841e-06, "loss": 0.0208, "step": 1195 },
    { "epoch": 4.054054054054054, "grad_norm": 0.37287043834241884, "learning_rate": 9.261722427491953e-06, "loss": 0.0194, "step": 1200 },
    { "epoch": 4.070945945945946, "grad_norm": 0.5533039626983974, "learning_rate": 9.115656687399149e-06, "loss": 0.02, "step": 1205 },
    { "epoch": 4.087837837837838, "grad_norm": 0.5313826064857297, "learning_rate": 8.971885577569058e-06, "loss": 0.0177, "step": 1210 },
    { "epoch": 4.10472972972973, "grad_norm": 0.44890915578901536, "learning_rate": 8.83042704270243e-06, "loss": 0.0189, "step": 1215 },
    { "epoch": 4.121621621621622, "grad_norm": 0.5560917251637355, "learning_rate": 8.691298738857432e-06, "loss": 0.019, "step": 1220 },
    { "epoch": 4.138513513513513, "grad_norm": 0.5436237010579721, "learning_rate": 8.554518031245934e-06, "loss": 0.0185, "step": 1225 },
    { "epoch": 4.155405405405405, "grad_norm": 0.4740850367041631, "learning_rate": 8.420101992066028e-06, "loss": 0.0166, "step": 1230 },
    { "epoch": 4.172297297297297, "grad_norm": 0.49052875884939057, "learning_rate": 8.288067398371214e-06, "loss": 0.0167, "step": 1235 },
    { "epoch": 4.1891891891891895, "grad_norm": 0.43052256280300827, "learning_rate": 8.158430729976372e-06, "loss": 0.0186, "step": 1240 },
    { "epoch": 4.206081081081081, "grad_norm": 0.4777607693530672, "learning_rate": 8.031208167400833e-06, "loss": 0.0194, "step": 1245 },
    { "epoch": 4.222972972972973, "grad_norm": 0.4366669926231541, "learning_rate": 7.906415589848834e-06, "loss": 0.0175, "step": 1250 },
    { "epoch": 4.239864864864865, "grad_norm": 0.4875801354190107, "learning_rate": 7.78406857322756e-06, "loss": 0.0176, "step": 1255 },
    { "epoch": 4.256756756756757, "grad_norm": 0.5496320514624984, "learning_rate": 7.664182388203037e-06, "loss": 0.0183, "step": 1260 },
    { "epoch": 4.273648648648648, "grad_norm": 0.4131315904135289, "learning_rate": 7.54677199829414e-06, "loss": 0.0175, "step": 1265 },
    { "epoch": 4.29054054054054, "grad_norm": 0.5104334470158947, "learning_rate": 7.4318520580049444e-06, "loss": 0.0173, "step": 1270 },
    { "epoch": 4.3074324324324325, "grad_norm": 0.5028181787292044, "learning_rate": 7.3194369109956e-06, "loss": 0.0193, "step": 1275 },
    { "epoch": 4.324324324324325, "grad_norm": 0.42628529148210376, "learning_rate": 7.209540588292083e-06, "loss": 0.0158, "step": 1280 },
    { "epoch": 4.341216216216216, "grad_norm": 0.44913562810367375, "learning_rate": 7.102176806534873e-06, "loss": 0.0177, "step": 1285 },
    { "epoch": 4.358108108108108, "grad_norm": 0.5514097182218852, "learning_rate": 6.9973589662669455e-06, "loss": 0.017, "step": 1290 },
    { "epoch": 4.375, "grad_norm": 0.5163780905038693, "learning_rate": 6.8951001502612065e-06, "loss": 0.0168, "step": 1295 },
    { "epoch": 4.391891891891892, "grad_norm": 0.49290296808784145, "learning_rate": 6.7954131218875404e-06, "loss": 0.016, "step": 1300 },
    { "epoch": 4.408783783783784, "grad_norm": 0.430553207351477, "learning_rate": 6.69831032351977e-06, "loss": 0.0169, "step": 1305 },
    { "epoch": 4.425675675675675, "grad_norm": 0.4642297721283167, "learning_rate": 6.603803874982687e-06, "loss": 0.0173, "step": 1310 },
    { "epoch": 4.4425675675675675, "grad_norm": 0.41074319788773356, "learning_rate": 6.511905572039298e-06, "loss": 0.0191, "step": 1315 },
    { "epoch": 4.45945945945946, "grad_norm": 0.42290407551524545, "learning_rate": 6.422626884918559e-06, "loss": 0.0187, "step": 1320 },
    { "epoch": 4.476351351351352, "grad_norm": 0.47394273027675116, "learning_rate": 6.33597895688373e-06, "loss": 0.0173, "step": 1325 },
    { "epoch": 4.493243243243243, "grad_norm": 0.4957247279877308, "learning_rate": 6.2519726028415145e-06, "loss": 0.0176, "step": 1330 },
    { "epoch": 4.510135135135135, "grad_norm": 0.40363156811514256, "learning_rate": 6.170618307992231e-06, "loss": 0.0156, "step": 1335 },
    { "epoch": 4.527027027027027, "grad_norm": 0.46394127923496636, "learning_rate": 6.091926226521089e-06, "loss": 0.0166, "step": 1340 },
    { "epoch": 4.543918918918919, "grad_norm": 0.4892778778622955, "learning_rate": 6.015906180330808e-06, "loss": 0.016, "step": 1345 },
    { "epoch": 4.5608108108108105, "grad_norm": 0.39032273135281653, "learning_rate": 5.942567657815696e-06, "loss": 0.0168, "step": 1350 },
    { "epoch": 4.577702702702703, "grad_norm": 0.4719537909270886, "learning_rate": 5.871919812677383e-06, "loss": 0.0164, "step": 1355 },
    { "epoch": 4.594594594594595, "grad_norm": 0.4245185329209189, "learning_rate": 5.8039714627822754e-06, "loss": 0.0156, "step": 1360 },
    { "epoch": 4.611486486486487, "grad_norm": 0.41475267241989133, "learning_rate": 5.738731089060995e-06, "loss": 0.0153, "step": 1365 },
    { "epoch": 4.628378378378378, "grad_norm": 0.5352228892671703, "learning_rate": 5.676206834449797e-06, "loss": 0.0165, "step": 1370 },
    { "epoch": 4.64527027027027, "grad_norm": 0.37701331899842194, "learning_rate": 5.616406502874251e-06, "loss": 0.0146, "step": 1375 },
    { "epoch": 4.662162162162162, "grad_norm": 0.39899803271574863, "learning_rate": 5.55933755827518e-06, "loss": 0.0148, "step": 1380 },
    { "epoch": 4.679054054054054, "grad_norm": 0.48339475577967467, "learning_rate": 5.505007123677063e-06, "loss": 0.0161, "step": 1385 },
    { "epoch": 4.695945945945946, "grad_norm": 0.47920447606296807, "learning_rate": 5.453421980298957e-06, "loss": 0.0161, "step": 1390 },
    { "epoch": 4.712837837837838, "grad_norm": 0.45349300791214964, "learning_rate": 5.4045885667081375e-06, "loss": 0.0162, "step": 1395 },
    { "epoch": 4.72972972972973, "grad_norm": 0.4524858740954348, "learning_rate": 5.358512978016445e-06, "loss": 0.0156, "step": 1400 },
    { "epoch": 4.746621621621622, "grad_norm": 0.4052610898199223, "learning_rate": 5.315200965119541e-06, "loss": 0.0144, "step": 1405 },
    { "epoch": 4.763513513513513, "grad_norm": 0.4648951449721674, "learning_rate": 5.27465793397911e-06, "loss": 0.0174, "step": 1410 },
    { "epoch": 4.780405405405405, "grad_norm": 0.4310666293350422, "learning_rate": 5.236888944948117e-06, "loss": 0.0157, "step": 1415 },
    { "epoch": 4.797297297297297, "grad_norm": 0.4423747920595358, "learning_rate": 5.201898712139201e-06, "loss": 0.0139, "step": 1420 },
    { "epoch": 4.8141891891891895, "grad_norm": 0.40535469669136887, "learning_rate": 5.1696916028362964e-06, "loss": 0.0161, "step": 1425 },
    { "epoch": 4.831081081081081, "grad_norm": 0.5159753334356647, "learning_rate": 5.1402716369495194e-06, "loss": 0.0172, "step": 1430 },
    { "epoch": 4.847972972972973, "grad_norm": 0.49504962870437985, "learning_rate": 5.113642486513428e-06, "loss": 0.0148, "step": 1435 },
    { "epoch": 4.864864864864865, "grad_norm": 0.4125316130138178, "learning_rate": 5.089807475228711e-06, "loss": 0.0142, "step": 1440 },
    { "epoch": 4.881756756756757, "grad_norm": 0.4694139851875935, "learning_rate": 5.06876957804733e-06, "loss": 0.0157, "step": 1445 },
    { "epoch": 4.898648648648649, "grad_norm": 0.44686436058551265, "learning_rate": 5.050531420801205e-06, "loss": 0.0159, "step": 1450 },
    { "epoch": 4.91554054054054, "grad_norm": 0.4838100458198807, "learning_rate": 5.03509527987448e-06, "loss": 0.0164, "step": 1455 },
    { "epoch": 4.9324324324324325, "grad_norm": 0.44838862569063753, "learning_rate": 5.022463081919386e-06, "loss": 0.0135, "step": 1460 },
    { "epoch": 4.949324324324325, "grad_norm": 0.33222610870493824, "learning_rate": 5.012636403615775e-06, "loss": 0.0157, "step": 1465 },
    { "epoch": 4.966216216216216, "grad_norm": 0.4899278481273953, "learning_rate": 5.005616471474332e-06, "loss": 0.015, "step": 1470 },
    { "epoch": 4.983108108108108, "grad_norm": 0.46395409247351443, "learning_rate": 5.001404161683473e-06, "loss": 0.0157, "step": 1475 },
    { "epoch": 5.0, "grad_norm": 0.38146340884683544, "learning_rate": 5e-06, "loss": 0.0141, "step": 1480 },
    { "epoch": 5.0, "step": 1480, "total_flos": 605863668678656.0, "train_loss": 0.36788542065266017, "train_runtime": 21782.9524, "train_samples_per_second": 2.171, "train_steps_per_second": 0.068 }
  ],
  "logging_steps": 5,
  "max_steps": 1480,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 605863668678656.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}