diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,9046 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.0, + "eval_steps": 500, + "global_step": 1284, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.001557632398753894, + "grad_norm": 1.7232097279069922, + "learning_rate": 9.999985033870806e-06, + "loss": 0.6355, + "step": 1 + }, + { + "epoch": 0.003115264797507788, + "grad_norm": 0.7663661005911214, + "learning_rate": 9.999940135572817e-06, + "loss": 0.2544, + "step": 2 + }, + { + "epoch": 0.004672897196261682, + "grad_norm": 0.8839734168626316, + "learning_rate": 9.999865305374812e-06, + "loss": 0.3767, + "step": 3 + }, + { + "epoch": 0.006230529595015576, + "grad_norm": 0.8042784289646617, + "learning_rate": 9.999760543724761e-06, + "loss": 0.184, + "step": 4 + }, + { + "epoch": 0.00778816199376947, + "grad_norm": 0.6765931399234855, + "learning_rate": 9.999625851249816e-06, + "loss": 0.2941, + "step": 5 + }, + { + "epoch": 0.009345794392523364, + "grad_norm": 0.7193614710776666, + "learning_rate": 9.999461228756304e-06, + "loss": 0.2705, + "step": 6 + }, + { + "epoch": 0.010903426791277258, + "grad_norm": 0.7344615984114217, + "learning_rate": 9.99926667722973e-06, + "loss": 0.3259, + "step": 7 + }, + { + "epoch": 0.012461059190031152, + "grad_norm": 0.73017739071341, + "learning_rate": 9.999042197834769e-06, + "loss": 0.2852, + "step": 8 + }, + { + "epoch": 0.014018691588785047, + "grad_norm": 0.592507619306345, + "learning_rate": 9.998787791915254e-06, + "loss": 0.2649, + "step": 9 + }, + { + "epoch": 0.01557632398753894, + "grad_norm": 0.9399500241178238, + "learning_rate": 9.998503460994176e-06, + "loss": 0.2521, + "step": 10 + }, + { + "epoch": 0.017133956386292833, + "grad_norm": 0.7765731965837874, + "learning_rate": 9.998189206773665e-06, + "loss": 0.3556, + "step": 11 + }, + { + "epoch": 0.018691588785046728, + "grad_norm": 0.6693444621163488, + "learning_rate": 9.997845031134992e-06, + "loss": 0.2239, + "step": 12 + }, + { + "epoch": 0.020249221183800622, + "grad_norm": 1.4936804808243616, + "learning_rate": 9.997470936138547e-06, + "loss": 0.8968, + "step": 13 + }, + { + "epoch": 0.021806853582554516, + "grad_norm": 0.7687864170878725, + "learning_rate": 9.997066924023832e-06, + "loss": 0.294, + "step": 14 + }, + { + "epoch": 0.02336448598130841, + "grad_norm": 1.0982871522503537, + "learning_rate": 9.996632997209444e-06, + "loss": 0.3007, + "step": 15 + }, + { + "epoch": 0.024922118380062305, + "grad_norm": 0.9371896720117513, + "learning_rate": 9.996169158293066e-06, + "loss": 0.2438, + "step": 16 + }, + { + "epoch": 0.0264797507788162, + "grad_norm": 0.7798940457815309, + "learning_rate": 9.995675410051448e-06, + "loss": 0.2212, + "step": 17 + }, + { + "epoch": 0.028037383177570093, + "grad_norm": 0.7355604168049059, + "learning_rate": 9.99515175544039e-06, + "loss": 0.2793, + "step": 18 + }, + { + "epoch": 0.029595015576323987, + "grad_norm": 0.8219475835131775, + "learning_rate": 9.994598197594723e-06, + "loss": 0.2868, + "step": 19 + }, + { + "epoch": 0.03115264797507788, + "grad_norm": 0.8165917677127098, + "learning_rate": 9.994014739828298e-06, + "loss": 0.2703, + "step": 20 + }, + { + "epoch": 0.03271028037383177, + "grad_norm": 0.733767269375867, + "learning_rate": 9.993401385633951e-06, + "loss": 0.2059, + "step": 21 + }, + { + "epoch": 0.03426791277258567, + "grad_norm": 
1.2694949029078932, + "learning_rate": 9.992758138683502e-06, + "loss": 0.9151, + "step": 22 + }, + { + "epoch": 0.03582554517133956, + "grad_norm": 0.7777986747788719, + "learning_rate": 9.992085002827719e-06, + "loss": 0.2599, + "step": 23 + }, + { + "epoch": 0.037383177570093455, + "grad_norm": 0.610655735988334, + "learning_rate": 9.991381982096293e-06, + "loss": 0.2347, + "step": 24 + }, + { + "epoch": 0.03894080996884735, + "grad_norm": 0.8562287986067203, + "learning_rate": 9.990649080697825e-06, + "loss": 0.2359, + "step": 25 + }, + { + "epoch": 0.040498442367601244, + "grad_norm": 0.6673172477309072, + "learning_rate": 9.989886303019793e-06, + "loss": 0.2271, + "step": 26 + }, + { + "epoch": 0.04205607476635514, + "grad_norm": 0.8629676432514921, + "learning_rate": 9.989093653628532e-06, + "loss": 0.2304, + "step": 27 + }, + { + "epoch": 0.04361370716510903, + "grad_norm": 0.6898071747588274, + "learning_rate": 9.988271137269197e-06, + "loss": 0.3192, + "step": 28 + }, + { + "epoch": 0.045171339563862926, + "grad_norm": 0.5010462374255644, + "learning_rate": 9.987418758865743e-06, + "loss": 0.2406, + "step": 29 + }, + { + "epoch": 0.04672897196261682, + "grad_norm": 0.8259320779398918, + "learning_rate": 9.98653652352089e-06, + "loss": 0.3266, + "step": 30 + }, + { + "epoch": 0.048286604361370715, + "grad_norm": 0.6076733459531145, + "learning_rate": 9.9856244365161e-06, + "loss": 0.2997, + "step": 31 + }, + { + "epoch": 0.04984423676012461, + "grad_norm": 0.7298955355144913, + "learning_rate": 9.984682503311536e-06, + "loss": 0.2967, + "step": 32 + }, + { + "epoch": 0.0514018691588785, + "grad_norm": 0.7346554751645953, + "learning_rate": 9.983710729546038e-06, + "loss": 0.3105, + "step": 33 + }, + { + "epoch": 0.0529595015576324, + "grad_norm": 0.668130630437843, + "learning_rate": 9.98270912103708e-06, + "loss": 0.3535, + "step": 34 + }, + { + "epoch": 0.05451713395638629, + "grad_norm": 0.6379634068346577, + "learning_rate": 9.981677683780743e-06, + "loss": 0.3709, + "step": 35 + }, + { + "epoch": 0.056074766355140186, + "grad_norm": 0.798860986544702, + "learning_rate": 9.98061642395168e-06, + "loss": 0.2243, + "step": 36 + }, + { + "epoch": 0.05763239875389408, + "grad_norm": 0.6593735789582216, + "learning_rate": 9.979525347903067e-06, + "loss": 0.2541, + "step": 37 + }, + { + "epoch": 0.059190031152647975, + "grad_norm": 0.7224987244711324, + "learning_rate": 9.978404462166582e-06, + "loss": 0.2736, + "step": 38 + }, + { + "epoch": 0.06074766355140187, + "grad_norm": 0.6440291059790796, + "learning_rate": 9.977253773452349e-06, + "loss": 0.2764, + "step": 39 + }, + { + "epoch": 0.06230529595015576, + "grad_norm": 0.6222848807943162, + "learning_rate": 9.976073288648913e-06, + "loss": 0.2698, + "step": 40 + }, + { + "epoch": 0.06386292834890965, + "grad_norm": 0.7394026438854199, + "learning_rate": 9.97486301482319e-06, + "loss": 0.2522, + "step": 41 + }, + { + "epoch": 0.06542056074766354, + "grad_norm": 0.7520093536738988, + "learning_rate": 9.973622959220426e-06, + "loss": 0.2137, + "step": 42 + }, + { + "epoch": 0.06697819314641744, + "grad_norm": 0.744961609793494, + "learning_rate": 9.97235312926415e-06, + "loss": 0.2328, + "step": 43 + }, + { + "epoch": 0.06853582554517133, + "grad_norm": 0.7656967225499874, + "learning_rate": 9.971053532556143e-06, + "loss": 0.2669, + "step": 44 + }, + { + "epoch": 0.07009345794392523, + "grad_norm": 0.6382236962718162, + "learning_rate": 9.969724176876373e-06, + "loss": 0.2192, + "step": 45 + }, + { + "epoch": 0.07165109034267912, 
+ "grad_norm": 0.6472374848660734, + "learning_rate": 9.968365070182967e-06, + "loss": 0.3144, + "step": 46 + }, + { + "epoch": 0.07320872274143302, + "grad_norm": 0.6391395623819942, + "learning_rate": 9.966976220612151e-06, + "loss": 0.2048, + "step": 47 + }, + { + "epoch": 0.07476635514018691, + "grad_norm": 0.7757553970772143, + "learning_rate": 9.965557636478203e-06, + "loss": 0.2579, + "step": 48 + }, + { + "epoch": 0.0763239875389408, + "grad_norm": 0.8133958395260938, + "learning_rate": 9.964109326273411e-06, + "loss": 0.3226, + "step": 49 + }, + { + "epoch": 0.0778816199376947, + "grad_norm": 0.4637065889612604, + "learning_rate": 9.962631298668015e-06, + "loss": 0.185, + "step": 50 + }, + { + "epoch": 0.0794392523364486, + "grad_norm": 0.6273381176066575, + "learning_rate": 9.961123562510153e-06, + "loss": 0.3509, + "step": 51 + }, + { + "epoch": 0.08099688473520249, + "grad_norm": 0.6419234115207709, + "learning_rate": 9.959586126825818e-06, + "loss": 0.2806, + "step": 52 + }, + { + "epoch": 0.08255451713395638, + "grad_norm": 0.7160133507575562, + "learning_rate": 9.95801900081879e-06, + "loss": 0.1838, + "step": 53 + }, + { + "epoch": 0.08411214953271028, + "grad_norm": 0.7848165329694581, + "learning_rate": 9.956422193870597e-06, + "loss": 0.255, + "step": 54 + }, + { + "epoch": 0.08566978193146417, + "grad_norm": 0.6040970248107416, + "learning_rate": 9.954795715540447e-06, + "loss": 0.2142, + "step": 55 + }, + { + "epoch": 0.08722741433021806, + "grad_norm": 0.6115559611935754, + "learning_rate": 9.953139575565169e-06, + "loss": 0.2668, + "step": 56 + }, + { + "epoch": 0.08878504672897196, + "grad_norm": 0.6298402847496053, + "learning_rate": 9.95145378385917e-06, + "loss": 0.2881, + "step": 57 + }, + { + "epoch": 0.09034267912772585, + "grad_norm": 0.5535053280930566, + "learning_rate": 9.949738350514358e-06, + "loss": 0.2202, + "step": 58 + }, + { + "epoch": 0.09190031152647975, + "grad_norm": 0.6352050541969153, + "learning_rate": 9.947993285800093e-06, + "loss": 0.3398, + "step": 59 + }, + { + "epoch": 0.09345794392523364, + "grad_norm": 0.826557642193822, + "learning_rate": 9.94621860016312e-06, + "loss": 0.3364, + "step": 60 + }, + { + "epoch": 0.09501557632398754, + "grad_norm": 0.7298038729692221, + "learning_rate": 9.944414304227508e-06, + "loss": 0.2634, + "step": 61 + }, + { + "epoch": 0.09657320872274143, + "grad_norm": 1.1507218720730263, + "learning_rate": 9.94258040879459e-06, + "loss": 0.8284, + "step": 62 + }, + { + "epoch": 0.09813084112149532, + "grad_norm": 0.6885391578463873, + "learning_rate": 9.94071692484289e-06, + "loss": 0.2336, + "step": 63 + }, + { + "epoch": 0.09968847352024922, + "grad_norm": 0.7334194656077329, + "learning_rate": 9.938823863528065e-06, + "loss": 0.2652, + "step": 64 + }, + { + "epoch": 0.10124610591900311, + "grad_norm": 1.0514064266169973, + "learning_rate": 9.936901236182836e-06, + "loss": 0.835, + "step": 65 + }, + { + "epoch": 0.102803738317757, + "grad_norm": 0.6334750887159478, + "learning_rate": 9.934949054316917e-06, + "loss": 0.2812, + "step": 66 + }, + { + "epoch": 0.1043613707165109, + "grad_norm": 0.4857684609448803, + "learning_rate": 9.932967329616953e-06, + "loss": 0.202, + "step": 67 + }, + { + "epoch": 0.1059190031152648, + "grad_norm": 0.690411540980916, + "learning_rate": 9.930956073946442e-06, + "loss": 0.2828, + "step": 68 + }, + { + "epoch": 0.10747663551401869, + "grad_norm": 0.6862713871911896, + "learning_rate": 9.928915299345669e-06, + "loss": 0.3246, + "step": 69 + }, + { + "epoch": 
0.10903426791277258, + "grad_norm": 0.6617081291471549, + "learning_rate": 9.926845018031632e-06, + "loss": 0.2621, + "step": 70 + }, + { + "epoch": 0.11059190031152648, + "grad_norm": 0.5745643134324515, + "learning_rate": 9.924745242397968e-06, + "loss": 0.2224, + "step": 71 + }, + { + "epoch": 0.11214953271028037, + "grad_norm": 0.7300598195282793, + "learning_rate": 9.922615985014887e-06, + "loss": 0.2297, + "step": 72 + }, + { + "epoch": 0.11370716510903427, + "grad_norm": 0.6648229017872759, + "learning_rate": 9.920457258629081e-06, + "loss": 0.2461, + "step": 73 + }, + { + "epoch": 0.11526479750778816, + "grad_norm": 0.6868885744716628, + "learning_rate": 9.918269076163664e-06, + "loss": 0.2728, + "step": 74 + }, + { + "epoch": 0.11682242990654206, + "grad_norm": 0.6982895714335172, + "learning_rate": 9.916051450718085e-06, + "loss": 0.257, + "step": 75 + }, + { + "epoch": 0.11838006230529595, + "grad_norm": 0.5677598559231926, + "learning_rate": 9.91380439556805e-06, + "loss": 0.2614, + "step": 76 + }, + { + "epoch": 0.11993769470404984, + "grad_norm": 0.5071293538430349, + "learning_rate": 9.911527924165445e-06, + "loss": 0.2793, + "step": 77 + }, + { + "epoch": 0.12149532710280374, + "grad_norm": 0.7281461877976642, + "learning_rate": 9.909222050138259e-06, + "loss": 0.2861, + "step": 78 + }, + { + "epoch": 0.12305295950155763, + "grad_norm": 0.6491525810286177, + "learning_rate": 9.906886787290492e-06, + "loss": 0.2799, + "step": 79 + }, + { + "epoch": 0.12461059190031153, + "grad_norm": 0.9626139969649712, + "learning_rate": 9.904522149602087e-06, + "loss": 0.2251, + "step": 80 + }, + { + "epoch": 0.1261682242990654, + "grad_norm": 0.705551656345552, + "learning_rate": 9.902128151228827e-06, + "loss": 0.2899, + "step": 81 + }, + { + "epoch": 0.1277258566978193, + "grad_norm": 0.5796094318767273, + "learning_rate": 9.899704806502272e-06, + "loss": 0.3023, + "step": 82 + }, + { + "epoch": 0.1292834890965732, + "grad_norm": 0.5778885783153778, + "learning_rate": 9.89725212992966e-06, + "loss": 0.2596, + "step": 83 + }, + { + "epoch": 0.1308411214953271, + "grad_norm": 0.6407326481544323, + "learning_rate": 9.894770136193814e-06, + "loss": 0.3182, + "step": 84 + }, + { + "epoch": 0.13239875389408098, + "grad_norm": 0.8761396102635524, + "learning_rate": 9.892258840153075e-06, + "loss": 0.3261, + "step": 85 + }, + { + "epoch": 0.13395638629283488, + "grad_norm": 0.8248387465515045, + "learning_rate": 9.889718256841195e-06, + "loss": 0.3105, + "step": 86 + }, + { + "epoch": 0.13551401869158877, + "grad_norm": 2.168823178408131, + "learning_rate": 9.88714840146725e-06, + "loss": 0.7869, + "step": 87 + }, + { + "epoch": 0.13707165109034267, + "grad_norm": 0.6163273279750748, + "learning_rate": 9.884549289415556e-06, + "loss": 0.2249, + "step": 88 + }, + { + "epoch": 0.13862928348909656, + "grad_norm": 1.5331833228458356, + "learning_rate": 9.881920936245577e-06, + "loss": 0.7778, + "step": 89 + }, + { + "epoch": 0.14018691588785046, + "grad_norm": 1.5428867178521768, + "learning_rate": 9.879263357691815e-06, + "loss": 0.8951, + "step": 90 + }, + { + "epoch": 0.14174454828660435, + "grad_norm": 0.6177160477898084, + "learning_rate": 9.876576569663739e-06, + "loss": 0.224, + "step": 91 + }, + { + "epoch": 0.14330218068535824, + "grad_norm": 0.7981644114495157, + "learning_rate": 9.873860588245675e-06, + "loss": 0.2892, + "step": 92 + }, + { + "epoch": 0.14485981308411214, + "grad_norm": 0.600801453450908, + "learning_rate": 9.871115429696716e-06, + "loss": 0.2782, + "step": 93 + }, + { + 
"epoch": 0.14641744548286603, + "grad_norm": 0.6547888169420532, + "learning_rate": 9.868341110450618e-06, + "loss": 0.3347, + "step": 94 + }, + { + "epoch": 0.14797507788161993, + "grad_norm": 0.5823853177715245, + "learning_rate": 9.865537647115713e-06, + "loss": 0.2778, + "step": 95 + }, + { + "epoch": 0.14953271028037382, + "grad_norm": 0.9116578300075924, + "learning_rate": 9.862705056474795e-06, + "loss": 0.3503, + "step": 96 + }, + { + "epoch": 0.15109034267912771, + "grad_norm": 0.5426782446240797, + "learning_rate": 9.859843355485033e-06, + "loss": 0.2386, + "step": 97 + }, + { + "epoch": 0.1526479750778816, + "grad_norm": 0.5543482391953797, + "learning_rate": 9.856952561277862e-06, + "loss": 0.2344, + "step": 98 + }, + { + "epoch": 0.1542056074766355, + "grad_norm": 0.7660668413572433, + "learning_rate": 9.854032691158881e-06, + "loss": 0.2692, + "step": 99 + }, + { + "epoch": 0.1557632398753894, + "grad_norm": 0.7745787606896338, + "learning_rate": 9.851083762607752e-06, + "loss": 0.2374, + "step": 100 + }, + { + "epoch": 0.1573208722741433, + "grad_norm": 0.8283533162770704, + "learning_rate": 9.848105793278092e-06, + "loss": 0.2699, + "step": 101 + }, + { + "epoch": 0.1588785046728972, + "grad_norm": 0.5323847631668214, + "learning_rate": 9.84509880099737e-06, + "loss": 0.2177, + "step": 102 + }, + { + "epoch": 0.16043613707165108, + "grad_norm": 0.7022062850923283, + "learning_rate": 9.842062803766804e-06, + "loss": 0.2258, + "step": 103 + }, + { + "epoch": 0.16199376947040497, + "grad_norm": 1.9739756098890826, + "learning_rate": 9.838997819761241e-06, + "loss": 0.9263, + "step": 104 + }, + { + "epoch": 0.16355140186915887, + "grad_norm": 0.7651110334163559, + "learning_rate": 9.835903867329061e-06, + "loss": 0.2654, + "step": 105 + }, + { + "epoch": 0.16510903426791276, + "grad_norm": 0.6634416278579489, + "learning_rate": 9.832780964992059e-06, + "loss": 0.2959, + "step": 106 + }, + { + "epoch": 0.16666666666666666, + "grad_norm": 0.62947915393871, + "learning_rate": 9.829629131445342e-06, + "loss": 0.2608, + "step": 107 + }, + { + "epoch": 0.16822429906542055, + "grad_norm": 1.55385845539561, + "learning_rate": 9.826448385557208e-06, + "loss": 0.842, + "step": 108 + }, + { + "epoch": 0.16978193146417445, + "grad_norm": 0.6067362647236724, + "learning_rate": 9.823238746369038e-06, + "loss": 0.3388, + "step": 109 + }, + { + "epoch": 0.17133956386292834, + "grad_norm": 0.7095716481506402, + "learning_rate": 9.820000233095179e-06, + "loss": 0.2953, + "step": 110 + }, + { + "epoch": 0.17289719626168223, + "grad_norm": 1.4374378842104667, + "learning_rate": 9.81673286512284e-06, + "loss": 0.7775, + "step": 111 + }, + { + "epoch": 0.17445482866043613, + "grad_norm": 0.6761770106689733, + "learning_rate": 9.813436662011958e-06, + "loss": 0.2929, + "step": 112 + }, + { + "epoch": 0.17601246105919002, + "grad_norm": 0.7954842040051252, + "learning_rate": 9.810111643495095e-06, + "loss": 0.2734, + "step": 113 + }, + { + "epoch": 0.17757009345794392, + "grad_norm": 1.271851983971207, + "learning_rate": 9.806757829477313e-06, + "loss": 0.7987, + "step": 114 + }, + { + "epoch": 0.1791277258566978, + "grad_norm": 0.526209190777812, + "learning_rate": 9.803375240036057e-06, + "loss": 0.3054, + "step": 115 + }, + { + "epoch": 0.1806853582554517, + "grad_norm": 0.6440505513615228, + "learning_rate": 9.799963895421036e-06, + "loss": 0.2416, + "step": 116 + }, + { + "epoch": 0.1822429906542056, + "grad_norm": 0.9367999131179932, + "learning_rate": 9.7965238160541e-06, + "loss": 0.3409, + 
"step": 117 + }, + { + "epoch": 0.1838006230529595, + "grad_norm": 0.8415567444030029, + "learning_rate": 9.793055022529116e-06, + "loss": 0.2847, + "step": 118 + }, + { + "epoch": 0.1853582554517134, + "grad_norm": 0.5786248320276434, + "learning_rate": 9.789557535611853e-06, + "loss": 0.2644, + "step": 119 + }, + { + "epoch": 0.18691588785046728, + "grad_norm": 0.687463433264138, + "learning_rate": 9.786031376239842e-06, + "loss": 0.2569, + "step": 120 + }, + { + "epoch": 0.18847352024922118, + "grad_norm": 0.45347828103414345, + "learning_rate": 9.78247656552227e-06, + "loss": 0.2415, + "step": 121 + }, + { + "epoch": 0.19003115264797507, + "grad_norm": 0.5650726061282849, + "learning_rate": 9.778893124739836e-06, + "loss": 0.2547, + "step": 122 + }, + { + "epoch": 0.19158878504672897, + "grad_norm": 0.5103395837306397, + "learning_rate": 9.775281075344639e-06, + "loss": 0.3065, + "step": 123 + }, + { + "epoch": 0.19314641744548286, + "grad_norm": 0.5183636563691485, + "learning_rate": 9.771640438960037e-06, + "loss": 0.2922, + "step": 124 + }, + { + "epoch": 0.19470404984423675, + "grad_norm": 0.5026042818767394, + "learning_rate": 9.76797123738052e-06, + "loss": 0.2588, + "step": 125 + }, + { + "epoch": 0.19626168224299065, + "grad_norm": 0.6794540572358105, + "learning_rate": 9.76427349257159e-06, + "loss": 0.3041, + "step": 126 + }, + { + "epoch": 0.19781931464174454, + "grad_norm": 0.6283325050944261, + "learning_rate": 9.76054722666962e-06, + "loss": 0.2991, + "step": 127 + }, + { + "epoch": 0.19937694704049844, + "grad_norm": 0.7184815540245492, + "learning_rate": 9.756792461981713e-06, + "loss": 0.2599, + "step": 128 + }, + { + "epoch": 0.20093457943925233, + "grad_norm": 0.701839843872524, + "learning_rate": 9.753009220985593e-06, + "loss": 0.2711, + "step": 129 + }, + { + "epoch": 0.20249221183800623, + "grad_norm": 0.6505643462960924, + "learning_rate": 9.749197526329446e-06, + "loss": 0.2947, + "step": 130 + }, + { + "epoch": 0.20404984423676012, + "grad_norm": 0.6104336914925119, + "learning_rate": 9.745357400831799e-06, + "loss": 0.2331, + "step": 131 + }, + { + "epoch": 0.205607476635514, + "grad_norm": 0.908932761888463, + "learning_rate": 9.741488867481377e-06, + "loss": 0.2377, + "step": 132 + }, + { + "epoch": 0.2071651090342679, + "grad_norm": 0.5980236644974856, + "learning_rate": 9.737591949436969e-06, + "loss": 0.1601, + "step": 133 + }, + { + "epoch": 0.2087227414330218, + "grad_norm": 0.5937884904455855, + "learning_rate": 9.733666670027288e-06, + "loss": 0.2542, + "step": 134 + }, + { + "epoch": 0.2102803738317757, + "grad_norm": 0.7666646448496817, + "learning_rate": 9.729713052750827e-06, + "loss": 0.2465, + "step": 135 + }, + { + "epoch": 0.2118380062305296, + "grad_norm": 0.5386370084617528, + "learning_rate": 9.725731121275725e-06, + "loss": 0.3172, + "step": 136 + }, + { + "epoch": 0.21339563862928349, + "grad_norm": 0.5517332858518383, + "learning_rate": 9.721720899439621e-06, + "loss": 0.2864, + "step": 137 + }, + { + "epoch": 0.21495327102803738, + "grad_norm": 0.60004714901346, + "learning_rate": 9.71768241124952e-06, + "loss": 0.3101, + "step": 138 + }, + { + "epoch": 0.21651090342679127, + "grad_norm": 0.7525697267510544, + "learning_rate": 9.71361568088163e-06, + "loss": 0.2235, + "step": 139 + }, + { + "epoch": 0.21806853582554517, + "grad_norm": 0.5929490381364206, + "learning_rate": 9.709520732681238e-06, + "loss": 0.231, + "step": 140 + }, + { + "epoch": 0.21962616822429906, + "grad_norm": 0.7027523851155623, + "learning_rate": 
9.705397591162556e-06, + "loss": 0.2379, + "step": 141 + }, + { + "epoch": 0.22118380062305296, + "grad_norm": 0.6504000514836087, + "learning_rate": 9.701246281008568e-06, + "loss": 0.2462, + "step": 142 + }, + { + "epoch": 0.22274143302180685, + "grad_norm": 0.5770728592720746, + "learning_rate": 9.697066827070894e-06, + "loss": 0.2859, + "step": 143 + }, + { + "epoch": 0.22429906542056074, + "grad_norm": 0.5798579381876525, + "learning_rate": 9.692859254369631e-06, + "loss": 0.2663, + "step": 144 + }, + { + "epoch": 0.22585669781931464, + "grad_norm": 0.5033709259076921, + "learning_rate": 9.68862358809321e-06, + "loss": 0.1913, + "step": 145 + }, + { + "epoch": 0.22741433021806853, + "grad_norm": 0.5493196423085126, + "learning_rate": 9.684359853598245e-06, + "loss": 0.2197, + "step": 146 + }, + { + "epoch": 0.22897196261682243, + "grad_norm": 0.5652921319820055, + "learning_rate": 9.680068076409373e-06, + "loss": 0.2601, + "step": 147 + }, + { + "epoch": 0.23052959501557632, + "grad_norm": 0.6783593909741826, + "learning_rate": 9.675748282219114e-06, + "loss": 0.2922, + "step": 148 + }, + { + "epoch": 0.23208722741433022, + "grad_norm": 0.7088765017570264, + "learning_rate": 9.671400496887704e-06, + "loss": 0.2997, + "step": 149 + }, + { + "epoch": 0.2336448598130841, + "grad_norm": 0.6064540912244738, + "learning_rate": 9.667024746442953e-06, + "loss": 0.3283, + "step": 150 + }, + { + "epoch": 0.235202492211838, + "grad_norm": 0.6900794118386824, + "learning_rate": 9.662621057080077e-06, + "loss": 0.2447, + "step": 151 + }, + { + "epoch": 0.2367601246105919, + "grad_norm": 0.8898873494327856, + "learning_rate": 9.65818945516155e-06, + "loss": 0.2857, + "step": 152 + }, + { + "epoch": 0.2383177570093458, + "grad_norm": 0.7596388046727889, + "learning_rate": 9.653729967216944e-06, + "loss": 0.2765, + "step": 153 + }, + { + "epoch": 0.2398753894080997, + "grad_norm": 0.5897245550654492, + "learning_rate": 9.64924261994277e-06, + "loss": 0.2629, + "step": 154 + }, + { + "epoch": 0.24143302180685358, + "grad_norm": 0.6261944121589039, + "learning_rate": 9.644727440202308e-06, + "loss": 0.3554, + "step": 155 + }, + { + "epoch": 0.24299065420560748, + "grad_norm": 0.7471727372694826, + "learning_rate": 9.640184455025472e-06, + "loss": 0.3595, + "step": 156 + }, + { + "epoch": 0.24454828660436137, + "grad_norm": 0.6539521944383881, + "learning_rate": 9.635613691608619e-06, + "loss": 0.2351, + "step": 157 + }, + { + "epoch": 0.24610591900311526, + "grad_norm": 0.7372613573398835, + "learning_rate": 9.631015177314402e-06, + "loss": 0.3061, + "step": 158 + }, + { + "epoch": 0.24766355140186916, + "grad_norm": 0.802899194753761, + "learning_rate": 9.62638893967161e-06, + "loss": 0.3606, + "step": 159 + }, + { + "epoch": 0.24922118380062305, + "grad_norm": 0.754935748177354, + "learning_rate": 9.621735006374984e-06, + "loss": 0.223, + "step": 160 + }, + { + "epoch": 0.2507788161993769, + "grad_norm": 0.4173684214144698, + "learning_rate": 9.617053405285077e-06, + "loss": 0.2319, + "step": 161 + }, + { + "epoch": 0.2523364485981308, + "grad_norm": 0.6435417325636267, + "learning_rate": 9.612344164428063e-06, + "loss": 0.3996, + "step": 162 + }, + { + "epoch": 0.2538940809968847, + "grad_norm": 0.8830545123878781, + "learning_rate": 9.607607311995587e-06, + "loss": 0.3724, + "step": 163 + }, + { + "epoch": 0.2554517133956386, + "grad_norm": 2.2533700279001554, + "learning_rate": 9.602842876344589e-06, + "loss": 0.8261, + "step": 164 + }, + { + "epoch": 0.2570093457943925, + "grad_norm": 
0.6641726752237889, + "learning_rate": 9.59805088599713e-06, + "loss": 0.3339, + "step": 165 + }, + { + "epoch": 0.2585669781931464, + "grad_norm": 0.6276989828906703, + "learning_rate": 9.59323136964023e-06, + "loss": 0.219, + "step": 166 + }, + { + "epoch": 0.2601246105919003, + "grad_norm": 0.8210416456426778, + "learning_rate": 9.588384356125691e-06, + "loss": 0.282, + "step": 167 + }, + { + "epoch": 0.2616822429906542, + "grad_norm": 0.751594963703605, + "learning_rate": 9.583509874469924e-06, + "loss": 0.3373, + "step": 168 + }, + { + "epoch": 0.2632398753894081, + "grad_norm": 0.7261273311893066, + "learning_rate": 9.578607953853777e-06, + "loss": 0.28, + "step": 169 + }, + { + "epoch": 0.26479750778816197, + "grad_norm": 0.6922505407762392, + "learning_rate": 9.573678623622364e-06, + "loss": 0.3412, + "step": 170 + }, + { + "epoch": 0.26635514018691586, + "grad_norm": 0.6419868376151807, + "learning_rate": 9.568721913284879e-06, + "loss": 0.2588, + "step": 171 + }, + { + "epoch": 0.26791277258566976, + "grad_norm": 0.49324052267306684, + "learning_rate": 9.563737852514432e-06, + "loss": 0.2332, + "step": 172 + }, + { + "epoch": 0.26947040498442365, + "grad_norm": 0.5770500149070207, + "learning_rate": 9.558726471147862e-06, + "loss": 0.2937, + "step": 173 + }, + { + "epoch": 0.27102803738317754, + "grad_norm": 0.7329648250931949, + "learning_rate": 9.553687799185556e-06, + "loss": 0.3309, + "step": 174 + }, + { + "epoch": 0.27258566978193144, + "grad_norm": 0.4864393923841514, + "learning_rate": 9.548621866791286e-06, + "loss": 0.2043, + "step": 175 + }, + { + "epoch": 0.27414330218068533, + "grad_norm": 1.136074014302222, + "learning_rate": 9.54352870429201e-06, + "loss": 0.7893, + "step": 176 + }, + { + "epoch": 0.2757009345794392, + "grad_norm": 0.6443586968700755, + "learning_rate": 9.538408342177699e-06, + "loss": 0.3025, + "step": 177 + }, + { + "epoch": 0.2772585669781931, + "grad_norm": 0.6403852564269055, + "learning_rate": 9.533260811101152e-06, + "loss": 0.2629, + "step": 178 + }, + { + "epoch": 0.278816199376947, + "grad_norm": 0.6869290500301357, + "learning_rate": 9.528086141877817e-06, + "loss": 0.301, + "step": 179 + }, + { + "epoch": 0.2803738317757009, + "grad_norm": 0.9338061428659953, + "learning_rate": 9.522884365485599e-06, + "loss": 0.7925, + "step": 180 + }, + { + "epoch": 0.2819314641744548, + "grad_norm": 0.7304512334819939, + "learning_rate": 9.517655513064682e-06, + "loss": 0.44, + "step": 181 + }, + { + "epoch": 0.2834890965732087, + "grad_norm": 0.6207928517482323, + "learning_rate": 9.512399615917342e-06, + "loss": 0.228, + "step": 182 + }, + { + "epoch": 0.2850467289719626, + "grad_norm": 0.8847617104487071, + "learning_rate": 9.507116705507748e-06, + "loss": 0.4137, + "step": 183 + }, + { + "epoch": 0.2866043613707165, + "grad_norm": 0.6631070285038642, + "learning_rate": 9.50180681346179e-06, + "loss": 0.3438, + "step": 184 + }, + { + "epoch": 0.2881619937694704, + "grad_norm": 1.0647904328891042, + "learning_rate": 9.49646997156688e-06, + "loss": 0.2452, + "step": 185 + }, + { + "epoch": 0.2897196261682243, + "grad_norm": 0.4959301062995678, + "learning_rate": 9.491106211771765e-06, + "loss": 0.2915, + "step": 186 + }, + { + "epoch": 0.29127725856697817, + "grad_norm": 0.705003606824088, + "learning_rate": 9.485715566186333e-06, + "loss": 0.2945, + "step": 187 + }, + { + "epoch": 0.29283489096573206, + "grad_norm": 1.045178016891709, + "learning_rate": 9.480298067081422e-06, + "loss": 0.5356, + "step": 188 + }, + { + "epoch": 0.29439252336448596, 
+ "grad_norm": 0.6198122538394608, + "learning_rate": 9.474853746888631e-06, + "loss": 0.2744, + "step": 189 + }, + { + "epoch": 0.29595015576323985, + "grad_norm": 0.631725313682397, + "learning_rate": 9.469382638200119e-06, + "loss": 0.3064, + "step": 190 + }, + { + "epoch": 0.29750778816199375, + "grad_norm": 0.49405112326099454, + "learning_rate": 9.463884773768413e-06, + "loss": 0.2016, + "step": 191 + }, + { + "epoch": 0.29906542056074764, + "grad_norm": 0.9255242318461628, + "learning_rate": 9.458360186506212e-06, + "loss": 0.318, + "step": 192 + }, + { + "epoch": 0.30062305295950154, + "grad_norm": 0.5172541239349757, + "learning_rate": 9.452808909486195e-06, + "loss": 0.2182, + "step": 193 + }, + { + "epoch": 0.30218068535825543, + "grad_norm": 0.6548225129226617, + "learning_rate": 9.447230975940808e-06, + "loss": 0.3501, + "step": 194 + }, + { + "epoch": 0.3037383177570093, + "grad_norm": 0.6373263585258429, + "learning_rate": 9.441626419262084e-06, + "loss": 0.2331, + "step": 195 + }, + { + "epoch": 0.3052959501557632, + "grad_norm": 0.8451103465765266, + "learning_rate": 9.43599527300143e-06, + "loss": 0.2379, + "step": 196 + }, + { + "epoch": 0.3068535825545171, + "grad_norm": 0.6257655537331657, + "learning_rate": 9.430337570869432e-06, + "loss": 0.262, + "step": 197 + }, + { + "epoch": 0.308411214953271, + "grad_norm": 0.7885047351755374, + "learning_rate": 9.424653346735649e-06, + "loss": 0.2783, + "step": 198 + }, + { + "epoch": 0.3099688473520249, + "grad_norm": 1.2716968661440529, + "learning_rate": 9.418942634628413e-06, + "loss": 0.3266, + "step": 199 + }, + { + "epoch": 0.3115264797507788, + "grad_norm": 0.709734211796566, + "learning_rate": 9.413205468734628e-06, + "loss": 0.298, + "step": 200 + }, + { + "epoch": 0.3130841121495327, + "grad_norm": 0.6966111704003821, + "learning_rate": 9.40744188339956e-06, + "loss": 0.3872, + "step": 201 + }, + { + "epoch": 0.3146417445482866, + "grad_norm": 0.6328947624746845, + "learning_rate": 9.401651913126634e-06, + "loss": 0.2855, + "step": 202 + }, + { + "epoch": 0.3161993769470405, + "grad_norm": 0.6782499058815041, + "learning_rate": 9.395835592577228e-06, + "loss": 0.2555, + "step": 203 + }, + { + "epoch": 0.3177570093457944, + "grad_norm": 0.6111146053952768, + "learning_rate": 9.389992956570463e-06, + "loss": 0.2119, + "step": 204 + }, + { + "epoch": 0.31931464174454827, + "grad_norm": 2.1014154014060646, + "learning_rate": 9.384124040082996e-06, + "loss": 1.3393, + "step": 205 + }, + { + "epoch": 0.32087227414330216, + "grad_norm": 0.6937660819701476, + "learning_rate": 9.378228878248814e-06, + "loss": 0.3003, + "step": 206 + }, + { + "epoch": 0.32242990654205606, + "grad_norm": 0.7798606308310687, + "learning_rate": 9.372307506359019e-06, + "loss": 0.2394, + "step": 207 + }, + { + "epoch": 0.32398753894080995, + "grad_norm": 0.8064220607757047, + "learning_rate": 9.366359959861615e-06, + "loss": 0.2457, + "step": 208 + }, + { + "epoch": 0.32554517133956384, + "grad_norm": 0.8663909019847741, + "learning_rate": 9.360386274361305e-06, + "loss": 0.2363, + "step": 209 + }, + { + "epoch": 0.32710280373831774, + "grad_norm": 0.7765065772183143, + "learning_rate": 9.354386485619264e-06, + "loss": 0.2519, + "step": 210 + }, + { + "epoch": 0.32866043613707163, + "grad_norm": 0.6924828379419826, + "learning_rate": 9.348360629552941e-06, + "loss": 0.2237, + "step": 211 + }, + { + "epoch": 0.3302180685358255, + "grad_norm": 0.6564177523605338, + "learning_rate": 9.342308742235831e-06, + "loss": 0.2369, + "step": 212 + }, + { + 
"epoch": 0.3317757009345794, + "grad_norm": 0.6166208484170821, + "learning_rate": 9.336230859897266e-06, + "loss": 0.2531, + "step": 213 + }, + { + "epoch": 0.3333333333333333, + "grad_norm": 0.6325656097098429, + "learning_rate": 9.330127018922195e-06, + "loss": 0.228, + "step": 214 + }, + { + "epoch": 0.3348909657320872, + "grad_norm": 0.9005060609426595, + "learning_rate": 9.323997255850965e-06, + "loss": 0.2068, + "step": 215 + }, + { + "epoch": 0.3364485981308411, + "grad_norm": 0.6030970162930338, + "learning_rate": 9.317841607379106e-06, + "loss": 0.2269, + "step": 216 + }, + { + "epoch": 0.338006230529595, + "grad_norm": 0.5757510215326483, + "learning_rate": 9.311660110357116e-06, + "loss": 0.2277, + "step": 217 + }, + { + "epoch": 0.3395638629283489, + "grad_norm": 0.48332154427815277, + "learning_rate": 9.30545280179022e-06, + "loss": 0.2088, + "step": 218 + }, + { + "epoch": 0.3411214953271028, + "grad_norm": 1.6587632436476218, + "learning_rate": 9.299219718838174e-06, + "loss": 0.8341, + "step": 219 + }, + { + "epoch": 0.3426791277258567, + "grad_norm": 0.9279443473544912, + "learning_rate": 9.292960898815032e-06, + "loss": 0.2415, + "step": 220 + }, + { + "epoch": 0.3442367601246106, + "grad_norm": 1.6924178170865158, + "learning_rate": 9.286676379188913e-06, + "loss": 0.9856, + "step": 221 + }, + { + "epoch": 0.34579439252336447, + "grad_norm": 1.8214244233005599, + "learning_rate": 9.280366197581792e-06, + "loss": 0.826, + "step": 222 + }, + { + "epoch": 0.34735202492211836, + "grad_norm": 0.783390501917877, + "learning_rate": 9.274030391769264e-06, + "loss": 0.8049, + "step": 223 + }, + { + "epoch": 0.34890965732087226, + "grad_norm": 0.5859949211179467, + "learning_rate": 9.267668999680328e-06, + "loss": 0.2939, + "step": 224 + }, + { + "epoch": 0.35046728971962615, + "grad_norm": 0.7610274820950346, + "learning_rate": 9.261282059397145e-06, + "loss": 0.2581, + "step": 225 + }, + { + "epoch": 0.35202492211838005, + "grad_norm": 0.6066786542121007, + "learning_rate": 9.25486960915483e-06, + "loss": 0.2758, + "step": 226 + }, + { + "epoch": 0.35358255451713394, + "grad_norm": 1.1073512044256302, + "learning_rate": 9.248431687341203e-06, + "loss": 0.237, + "step": 227 + }, + { + "epoch": 0.35514018691588783, + "grad_norm": 1.0141799395602682, + "learning_rate": 9.241968332496576e-06, + "loss": 0.2568, + "step": 228 + }, + { + "epoch": 0.35669781931464173, + "grad_norm": 0.5568072088382595, + "learning_rate": 9.235479583313504e-06, + "loss": 0.2334, + "step": 229 + }, + { + "epoch": 0.3582554517133956, + "grad_norm": 0.7930065901392893, + "learning_rate": 9.228965478636575e-06, + "loss": 0.2793, + "step": 230 + }, + { + "epoch": 0.3598130841121495, + "grad_norm": 0.7048596889355911, + "learning_rate": 9.222426057462162e-06, + "loss": 0.3062, + "step": 231 + }, + { + "epoch": 0.3613707165109034, + "grad_norm": 0.8914753088496622, + "learning_rate": 9.215861358938191e-06, + "loss": 0.3289, + "step": 232 + }, + { + "epoch": 0.3629283489096573, + "grad_norm": 0.8233950771824091, + "learning_rate": 9.209271422363915e-06, + "loss": 0.3021, + "step": 233 + }, + { + "epoch": 0.3644859813084112, + "grad_norm": 0.8572614177235147, + "learning_rate": 9.20265628718967e-06, + "loss": 0.3435, + "step": 234 + }, + { + "epoch": 0.3660436137071651, + "grad_norm": 0.6533204687228193, + "learning_rate": 9.196015993016642e-06, + "loss": 0.2537, + "step": 235 + }, + { + "epoch": 0.367601246105919, + "grad_norm": 0.676657852907951, + "learning_rate": 9.189350579596635e-06, + "loss": 0.273, + 
"step": 236 + }, + { + "epoch": 0.3691588785046729, + "grad_norm": 0.8090061594796749, + "learning_rate": 9.182660086831819e-06, + "loss": 0.3786, + "step": 237 + }, + { + "epoch": 0.3707165109034268, + "grad_norm": 0.6195251333212162, + "learning_rate": 9.175944554774507e-06, + "loss": 0.2799, + "step": 238 + }, + { + "epoch": 0.37227414330218067, + "grad_norm": 0.5501170288605118, + "learning_rate": 9.169204023626911e-06, + "loss": 0.2577, + "step": 239 + }, + { + "epoch": 0.37383177570093457, + "grad_norm": 0.6087726744520703, + "learning_rate": 9.162438533740891e-06, + "loss": 0.2578, + "step": 240 + }, + { + "epoch": 0.37538940809968846, + "grad_norm": 0.6423864822269919, + "learning_rate": 9.15564812561773e-06, + "loss": 0.2865, + "step": 241 + }, + { + "epoch": 0.37694704049844235, + "grad_norm": 0.6443057475504381, + "learning_rate": 9.148832839907871e-06, + "loss": 0.2898, + "step": 242 + }, + { + "epoch": 0.37850467289719625, + "grad_norm": 0.5688149258379418, + "learning_rate": 9.141992717410697e-06, + "loss": 0.239, + "step": 243 + }, + { + "epoch": 0.38006230529595014, + "grad_norm": 0.6629923621745606, + "learning_rate": 9.135127799074271e-06, + "loss": 0.2614, + "step": 244 + }, + { + "epoch": 0.38161993769470404, + "grad_norm": 0.6300463035343699, + "learning_rate": 9.128238125995093e-06, + "loss": 0.24, + "step": 245 + }, + { + "epoch": 0.38317757009345793, + "grad_norm": 0.643027164394109, + "learning_rate": 9.121323739417858e-06, + "loss": 0.2365, + "step": 246 + }, + { + "epoch": 0.3847352024922118, + "grad_norm": 2.8455942968073282, + "learning_rate": 9.114384680735211e-06, + "loss": 0.7814, + "step": 247 + }, + { + "epoch": 0.3862928348909657, + "grad_norm": 0.630104563255545, + "learning_rate": 9.107420991487488e-06, + "loss": 0.2234, + "step": 248 + }, + { + "epoch": 0.3878504672897196, + "grad_norm": 0.905027542232206, + "learning_rate": 9.100432713362477e-06, + "loss": 0.3142, + "step": 249 + }, + { + "epoch": 0.3894080996884735, + "grad_norm": 0.6960898261331736, + "learning_rate": 9.09341988819517e-06, + "loss": 0.3195, + "step": 250 + }, + { + "epoch": 0.3909657320872274, + "grad_norm": 0.7350311907562319, + "learning_rate": 9.086382557967507e-06, + "loss": 0.2479, + "step": 251 + }, + { + "epoch": 0.3925233644859813, + "grad_norm": 0.7212085527913289, + "learning_rate": 9.07932076480812e-06, + "loss": 0.2334, + "step": 252 + }, + { + "epoch": 0.3940809968847352, + "grad_norm": 0.6340762358232386, + "learning_rate": 9.072234550992099e-06, + "loss": 0.282, + "step": 253 + }, + { + "epoch": 0.3956386292834891, + "grad_norm": 0.5688729860657064, + "learning_rate": 9.065123958940716e-06, + "loss": 0.2928, + "step": 254 + }, + { + "epoch": 0.397196261682243, + "grad_norm": 0.8922598360101682, + "learning_rate": 9.057989031221187e-06, + "loss": 0.281, + "step": 255 + }, + { + "epoch": 0.3987538940809969, + "grad_norm": 0.7114134585553089, + "learning_rate": 9.050829810546416e-06, + "loss": 0.2635, + "step": 256 + }, + { + "epoch": 0.40031152647975077, + "grad_norm": 0.6966492509810489, + "learning_rate": 9.043646339774726e-06, + "loss": 0.3394, + "step": 257 + }, + { + "epoch": 0.40186915887850466, + "grad_norm": 0.5958512506208158, + "learning_rate": 9.036438661909624e-06, + "loss": 0.2718, + "step": 258 + }, + { + "epoch": 0.40342679127725856, + "grad_norm": 0.9116531361266647, + "learning_rate": 9.029206820099518e-06, + "loss": 0.2785, + "step": 259 + }, + { + "epoch": 0.40498442367601245, + "grad_norm": 0.5887944110529719, + "learning_rate": 
9.021950857637486e-06, + "loss": 0.2668, + "step": 260 + }, + { + "epoch": 0.40654205607476634, + "grad_norm": 0.6108632031180715, + "learning_rate": 9.014670817960993e-06, + "loss": 0.1969, + "step": 261 + }, + { + "epoch": 0.40809968847352024, + "grad_norm": 0.6907915569936867, + "learning_rate": 9.007366744651646e-06, + "loss": 0.2268, + "step": 262 + }, + { + "epoch": 0.40965732087227413, + "grad_norm": 0.5793225985517833, + "learning_rate": 9.000038681434925e-06, + "loss": 0.1931, + "step": 263 + }, + { + "epoch": 0.411214953271028, + "grad_norm": 1.0784967522591073, + "learning_rate": 8.99268667217993e-06, + "loss": 0.2329, + "step": 264 + }, + { + "epoch": 0.4127725856697819, + "grad_norm": 0.602627598917929, + "learning_rate": 8.985310760899108e-06, + "loss": 0.2574, + "step": 265 + }, + { + "epoch": 0.4143302180685358, + "grad_norm": 0.6724448335681737, + "learning_rate": 8.977910991747993e-06, + "loss": 0.2781, + "step": 266 + }, + { + "epoch": 0.4158878504672897, + "grad_norm": 0.713294514234468, + "learning_rate": 8.970487409024949e-06, + "loss": 0.2284, + "step": 267 + }, + { + "epoch": 0.4174454828660436, + "grad_norm": 0.8427860833648895, + "learning_rate": 8.96304005717089e-06, + "loss": 0.2953, + "step": 268 + }, + { + "epoch": 0.4190031152647975, + "grad_norm": 0.5840913639256132, + "learning_rate": 8.955568980769033e-06, + "loss": 0.2974, + "step": 269 + }, + { + "epoch": 0.4205607476635514, + "grad_norm": 0.6131987426518342, + "learning_rate": 8.948074224544615e-06, + "loss": 0.2773, + "step": 270 + }, + { + "epoch": 0.4221183800623053, + "grad_norm": 0.5947515631497642, + "learning_rate": 8.94055583336463e-06, + "loss": 0.275, + "step": 271 + }, + { + "epoch": 0.4236760124610592, + "grad_norm": 0.5946955459161034, + "learning_rate": 8.933013852237564e-06, + "loss": 0.3307, + "step": 272 + }, + { + "epoch": 0.4252336448598131, + "grad_norm": 0.7451920245650221, + "learning_rate": 8.925448326313125e-06, + "loss": 0.286, + "step": 273 + }, + { + "epoch": 0.42679127725856697, + "grad_norm": 0.924856917547545, + "learning_rate": 8.917859300881965e-06, + "loss": 0.2879, + "step": 274 + }, + { + "epoch": 0.42834890965732086, + "grad_norm": 1.0327909293428048, + "learning_rate": 8.91024682137542e-06, + "loss": 0.2408, + "step": 275 + }, + { + "epoch": 0.42990654205607476, + "grad_norm": 0.880531058290941, + "learning_rate": 8.90261093336523e-06, + "loss": 0.3045, + "step": 276 + }, + { + "epoch": 0.43146417445482865, + "grad_norm": 0.580559025082547, + "learning_rate": 8.89495168256327e-06, + "loss": 0.2805, + "step": 277 + }, + { + "epoch": 0.43302180685358255, + "grad_norm": 0.5475610266945151, + "learning_rate": 8.887269114821275e-06, + "loss": 0.2338, + "step": 278 + }, + { + "epoch": 0.43457943925233644, + "grad_norm": 0.6043881248930308, + "learning_rate": 8.879563276130567e-06, + "loss": 0.2516, + "step": 279 + }, + { + "epoch": 0.43613707165109034, + "grad_norm": 0.6621273162261064, + "learning_rate": 8.871834212621773e-06, + "loss": 0.2485, + "step": 280 + }, + { + "epoch": 0.43769470404984423, + "grad_norm": 0.6108881149273678, + "learning_rate": 8.86408197056456e-06, + "loss": 0.2082, + "step": 281 + }, + { + "epoch": 0.4392523364485981, + "grad_norm": 0.6626887696196829, + "learning_rate": 8.856306596367351e-06, + "loss": 0.2741, + "step": 282 + }, + { + "epoch": 0.440809968847352, + "grad_norm": 0.6517640250027363, + "learning_rate": 8.84850813657705e-06, + "loss": 0.3733, + "step": 283 + }, + { + "epoch": 0.4423676012461059, + "grad_norm": 0.5979891922361272, + 
"learning_rate": 8.840686637878756e-06, + "loss": 0.2615, + "step": 284 + }, + { + "epoch": 0.4439252336448598, + "grad_norm": 0.5447033448285953, + "learning_rate": 8.832842147095495e-06, + "loss": 0.2204, + "step": 285 + }, + { + "epoch": 0.4454828660436137, + "grad_norm": 0.6176926760203407, + "learning_rate": 8.82497471118793e-06, + "loss": 0.259, + "step": 286 + }, + { + "epoch": 0.4470404984423676, + "grad_norm": 0.6803871655362536, + "learning_rate": 8.817084377254089e-06, + "loss": 0.3406, + "step": 287 + }, + { + "epoch": 0.4485981308411215, + "grad_norm": 1.0924030353117906, + "learning_rate": 8.809171192529074e-06, + "loss": 0.7712, + "step": 288 + }, + { + "epoch": 0.4501557632398754, + "grad_norm": 0.7261898907998104, + "learning_rate": 8.80123520438478e-06, + "loss": 0.2439, + "step": 289 + }, + { + "epoch": 0.4517133956386293, + "grad_norm": 0.5278888030187282, + "learning_rate": 8.79327646032962e-06, + "loss": 0.2838, + "step": 290 + }, + { + "epoch": 0.4532710280373832, + "grad_norm": 0.7336946619047033, + "learning_rate": 8.785295008008227e-06, + "loss": 0.2863, + "step": 291 + }, + { + "epoch": 0.45482866043613707, + "grad_norm": 0.5663680560662904, + "learning_rate": 8.777290895201186e-06, + "loss": 0.2487, + "step": 292 + }, + { + "epoch": 0.45638629283489096, + "grad_norm": 0.6674793012574268, + "learning_rate": 8.769264169824725e-06, + "loss": 0.2115, + "step": 293 + }, + { + "epoch": 0.45794392523364486, + "grad_norm": 0.629476332991769, + "learning_rate": 8.761214879930452e-06, + "loss": 0.2698, + "step": 294 + }, + { + "epoch": 0.45950155763239875, + "grad_norm": 0.5733049643694853, + "learning_rate": 8.753143073705048e-06, + "loss": 0.2755, + "step": 295 + }, + { + "epoch": 0.46105919003115264, + "grad_norm": 0.7496857759281685, + "learning_rate": 8.745048799469996e-06, + "loss": 0.2066, + "step": 296 + }, + { + "epoch": 0.46261682242990654, + "grad_norm": 0.5833814287468523, + "learning_rate": 8.736932105681272e-06, + "loss": 0.2233, + "step": 297 + }, + { + "epoch": 0.46417445482866043, + "grad_norm": 0.7333726802931715, + "learning_rate": 8.728793040929075e-06, + "loss": 0.2529, + "step": 298 + }, + { + "epoch": 0.4657320872274143, + "grad_norm": 0.6752352954357181, + "learning_rate": 8.720631653937522e-06, + "loss": 0.2487, + "step": 299 + }, + { + "epoch": 0.4672897196261682, + "grad_norm": 0.6281610479983413, + "learning_rate": 8.712447993564362e-06, + "loss": 0.2513, + "step": 300 + }, + { + "epoch": 0.4688473520249221, + "grad_norm": 1.719855082212449, + "learning_rate": 8.70424210880068e-06, + "loss": 1.2841, + "step": 301 + }, + { + "epoch": 0.470404984423676, + "grad_norm": 0.8697991461292468, + "learning_rate": 8.696014048770611e-06, + "loss": 0.1752, + "step": 302 + }, + { + "epoch": 0.4719626168224299, + "grad_norm": 0.7388848487578813, + "learning_rate": 8.68776386273104e-06, + "loss": 0.3559, + "step": 303 + }, + { + "epoch": 0.4735202492211838, + "grad_norm": 0.6740217209253545, + "learning_rate": 8.679491600071304e-06, + "loss": 0.2506, + "step": 304 + }, + { + "epoch": 0.4750778816199377, + "grad_norm": 0.8324393939489836, + "learning_rate": 8.671197310312905e-06, + "loss": 0.2951, + "step": 305 + }, + { + "epoch": 0.4766355140186916, + "grad_norm": 0.5859142473050233, + "learning_rate": 8.662881043109208e-06, + "loss": 0.2737, + "step": 306 + }, + { + "epoch": 0.4781931464174455, + "grad_norm": 0.5144143552921643, + "learning_rate": 8.654542848245146e-06, + "loss": 0.2555, + "step": 307 + }, + { + "epoch": 0.4797507788161994, + "grad_norm": 
0.9870060592323339, + "learning_rate": 8.646182775636917e-06, + "loss": 0.3268, + "step": 308 + }, + { + "epoch": 0.48130841121495327, + "grad_norm": 1.22444825315241, + "learning_rate": 8.637800875331693e-06, + "loss": 0.8927, + "step": 309 + }, + { + "epoch": 0.48286604361370716, + "grad_norm": 0.6219488136095235, + "learning_rate": 8.629397197507315e-06, + "loss": 0.1925, + "step": 310 + }, + { + "epoch": 0.48442367601246106, + "grad_norm": 0.6663733783513853, + "learning_rate": 8.620971792471994e-06, + "loss": 0.2525, + "step": 311 + }, + { + "epoch": 0.48598130841121495, + "grad_norm": 0.7779309034351384, + "learning_rate": 8.612524710664012e-06, + "loss": 0.2857, + "step": 312 + }, + { + "epoch": 0.48753894080996885, + "grad_norm": 0.5815264215390993, + "learning_rate": 8.604056002651414e-06, + "loss": 0.2132, + "step": 313 + }, + { + "epoch": 0.48909657320872274, + "grad_norm": 0.639985193258881, + "learning_rate": 8.595565719131711e-06, + "loss": 0.2927, + "step": 314 + }, + { + "epoch": 0.49065420560747663, + "grad_norm": 0.5843630349664094, + "learning_rate": 8.587053910931576e-06, + "loss": 0.2073, + "step": 315 + }, + { + "epoch": 0.49221183800623053, + "grad_norm": 0.5718073343739106, + "learning_rate": 8.578520629006537e-06, + "loss": 0.2701, + "step": 316 + }, + { + "epoch": 0.4937694704049844, + "grad_norm": 0.8093573764947887, + "learning_rate": 8.569965924440675e-06, + "loss": 0.3701, + "step": 317 + }, + { + "epoch": 0.4953271028037383, + "grad_norm": 0.45875585137104996, + "learning_rate": 8.561389848446314e-06, + "loss": 0.2535, + "step": 318 + }, + { + "epoch": 0.4968847352024922, + "grad_norm": 0.6800533406672229, + "learning_rate": 8.55279245236372e-06, + "loss": 0.2542, + "step": 319 + }, + { + "epoch": 0.4984423676012461, + "grad_norm": 0.5791491500061517, + "learning_rate": 8.544173787660788e-06, + "loss": 0.2233, + "step": 320 + }, + { + "epoch": 0.5, + "grad_norm": 0.6853895685598861, + "learning_rate": 8.535533905932739e-06, + "loss": 0.1762, + "step": 321 + }, + { + "epoch": 0.5015576323987538, + "grad_norm": 0.5400182911842227, + "learning_rate": 8.526872858901806e-06, + "loss": 0.2565, + "step": 322 + }, + { + "epoch": 0.5031152647975078, + "grad_norm": 0.7099762938056243, + "learning_rate": 8.518190698416929e-06, + "loss": 0.2901, + "step": 323 + }, + { + "epoch": 0.5046728971962616, + "grad_norm": 0.7351812911105055, + "learning_rate": 8.509487476453442e-06, + "loss": 0.2829, + "step": 324 + }, + { + "epoch": 0.5062305295950156, + "grad_norm": 0.6208369210464704, + "learning_rate": 8.500763245112763e-06, + "loss": 0.2916, + "step": 325 + }, + { + "epoch": 0.5077881619937694, + "grad_norm": 0.7390752110429559, + "learning_rate": 8.492018056622083e-06, + "loss": 0.3912, + "step": 326 + }, + { + "epoch": 0.5093457943925234, + "grad_norm": 0.5930641088378493, + "learning_rate": 8.483251963334047e-06, + "loss": 0.2193, + "step": 327 + }, + { + "epoch": 0.5109034267912772, + "grad_norm": 0.6344017285315496, + "learning_rate": 8.474465017726452e-06, + "loss": 0.2191, + "step": 328 + }, + { + "epoch": 0.5124610591900312, + "grad_norm": 0.7250776947740573, + "learning_rate": 8.465657272401921e-06, + "loss": 0.2435, + "step": 329 + }, + { + "epoch": 0.514018691588785, + "grad_norm": 0.6175903219629659, + "learning_rate": 8.456828780087598e-06, + "loss": 0.2005, + "step": 330 + }, + { + "epoch": 0.5155763239875389, + "grad_norm": 0.6186407482326058, + "learning_rate": 8.447979593634823e-06, + "loss": 0.2969, + "step": 331 + }, + { + "epoch": 0.5171339563862928, + 
"grad_norm": 0.6221498367824214, + "learning_rate": 8.439109766018825e-06, + "loss": 0.2532, + "step": 332 + }, + { + "epoch": 0.5186915887850467, + "grad_norm": 0.5991537960888588, + "learning_rate": 8.430219350338398e-06, + "loss": 0.2029, + "step": 333 + }, + { + "epoch": 0.5202492211838006, + "grad_norm": 0.5074549508649242, + "learning_rate": 8.421308399815586e-06, + "loss": 0.2452, + "step": 334 + }, + { + "epoch": 0.5218068535825545, + "grad_norm": 1.0447367430008974, + "learning_rate": 8.412376967795362e-06, + "loss": 0.7821, + "step": 335 + }, + { + "epoch": 0.5233644859813084, + "grad_norm": 0.6715190449661054, + "learning_rate": 8.403425107745315e-06, + "loss": 0.1873, + "step": 336 + }, + { + "epoch": 0.5249221183800623, + "grad_norm": 0.8046073365053287, + "learning_rate": 8.394452873255321e-06, + "loss": 0.2459, + "step": 337 + }, + { + "epoch": 0.5264797507788161, + "grad_norm": 0.6185327399453182, + "learning_rate": 8.385460318037228e-06, + "loss": 0.2347, + "step": 338 + }, + { + "epoch": 0.5280373831775701, + "grad_norm": 0.6523073370428902, + "learning_rate": 8.376447495924533e-06, + "loss": 0.2494, + "step": 339 + }, + { + "epoch": 0.5295950155763239, + "grad_norm": 0.8230020627407991, + "learning_rate": 8.367414460872064e-06, + "loss": 0.7898, + "step": 340 + }, + { + "epoch": 0.5311526479750779, + "grad_norm": 0.6739918439315772, + "learning_rate": 8.358361266955641e-06, + "loss": 0.3159, + "step": 341 + }, + { + "epoch": 0.5327102803738317, + "grad_norm": 0.7519773686193855, + "learning_rate": 8.34928796837178e-06, + "loss": 0.2593, + "step": 342 + }, + { + "epoch": 0.5342679127725857, + "grad_norm": 0.5445971280446563, + "learning_rate": 8.34019461943734e-06, + "loss": 0.2923, + "step": 343 + }, + { + "epoch": 0.5358255451713395, + "grad_norm": 0.7071155258569332, + "learning_rate": 8.331081274589217e-06, + "loss": 0.2598, + "step": 344 + }, + { + "epoch": 0.5373831775700935, + "grad_norm": 0.719502132159577, + "learning_rate": 8.321947988384006e-06, + "loss": 0.276, + "step": 345 + }, + { + "epoch": 0.5389408099688473, + "grad_norm": 0.5170866766320925, + "learning_rate": 8.312794815497688e-06, + "loss": 0.2236, + "step": 346 + }, + { + "epoch": 0.5404984423676013, + "grad_norm": 0.6654228108717598, + "learning_rate": 8.303621810725287e-06, + "loss": 0.2666, + "step": 347 + }, + { + "epoch": 0.5420560747663551, + "grad_norm": 0.6117403127181333, + "learning_rate": 8.294429028980555e-06, + "loss": 0.2767, + "step": 348 + }, + { + "epoch": 0.543613707165109, + "grad_norm": 0.8584300691210696, + "learning_rate": 8.285216525295636e-06, + "loss": 0.2521, + "step": 349 + }, + { + "epoch": 0.5451713395638629, + "grad_norm": 0.7222215782352946, + "learning_rate": 8.275984354820736e-06, + "loss": 0.2499, + "step": 350 + }, + { + "epoch": 0.5467289719626168, + "grad_norm": 0.643947565152816, + "learning_rate": 8.266732572823799e-06, + "loss": 0.2092, + "step": 351 + }, + { + "epoch": 0.5482866043613707, + "grad_norm": 0.6231517894213087, + "learning_rate": 8.25746123469017e-06, + "loss": 0.2598, + "step": 352 + }, + { + "epoch": 0.5498442367601246, + "grad_norm": 0.5983797307315736, + "learning_rate": 8.248170395922266e-06, + "loss": 0.3241, + "step": 353 + }, + { + "epoch": 0.5514018691588785, + "grad_norm": 0.6611331737862588, + "learning_rate": 8.238860112139246e-06, + "loss": 0.2292, + "step": 354 + }, + { + "epoch": 0.5529595015576324, + "grad_norm": 0.667013942050513, + "learning_rate": 8.229530439076674e-06, + "loss": 0.2911, + "step": 355 + }, + { + "epoch": 
0.5545171339563862, + "grad_norm": 0.5711025724816864, + "learning_rate": 8.220181432586187e-06, + "loss": 0.3828, + "step": 356 + }, + { + "epoch": 0.5560747663551402, + "grad_norm": 0.7326983314478233, + "learning_rate": 8.210813148635158e-06, + "loss": 0.4075, + "step": 357 + }, + { + "epoch": 0.557632398753894, + "grad_norm": 0.5806269356738779, + "learning_rate": 8.201425643306367e-06, + "loss": 0.2283, + "step": 358 + }, + { + "epoch": 0.559190031152648, + "grad_norm": 0.8188821554767335, + "learning_rate": 8.192018972797665e-06, + "loss": 0.2549, + "step": 359 + }, + { + "epoch": 0.5607476635514018, + "grad_norm": 0.6809831648038962, + "learning_rate": 8.182593193421625e-06, + "loss": 0.2543, + "step": 360 + }, + { + "epoch": 0.5623052959501558, + "grad_norm": 0.6203982930897892, + "learning_rate": 8.173148361605224e-06, + "loss": 0.3205, + "step": 361 + }, + { + "epoch": 0.5638629283489096, + "grad_norm": 0.4686220806935477, + "learning_rate": 8.163684533889489e-06, + "loss": 0.2356, + "step": 362 + }, + { + "epoch": 0.5654205607476636, + "grad_norm": 0.7261908034014758, + "learning_rate": 8.154201766929167e-06, + "loss": 0.2763, + "step": 363 + }, + { + "epoch": 0.5669781931464174, + "grad_norm": 0.6301811228833201, + "learning_rate": 8.144700117492386e-06, + "loss": 0.2369, + "step": 364 + }, + { + "epoch": 0.5685358255451713, + "grad_norm": 0.7734460446532929, + "learning_rate": 8.135179642460308e-06, + "loss": 0.2722, + "step": 365 + }, + { + "epoch": 0.5700934579439252, + "grad_norm": 0.6997558366317673, + "learning_rate": 8.125640398826803e-06, + "loss": 0.2543, + "step": 366 + }, + { + "epoch": 0.5716510903426791, + "grad_norm": 0.8988136993440748, + "learning_rate": 8.116082443698085e-06, + "loss": 0.2835, + "step": 367 + }, + { + "epoch": 0.573208722741433, + "grad_norm": 0.8141909161490042, + "learning_rate": 8.106505834292396e-06, + "loss": 0.2538, + "step": 368 + }, + { + "epoch": 0.5747663551401869, + "grad_norm": 1.3911734406138567, + "learning_rate": 8.09691062793964e-06, + "loss": 0.7141, + "step": 369 + }, + { + "epoch": 0.5763239875389408, + "grad_norm": 0.6359597508417911, + "learning_rate": 8.087296882081062e-06, + "loss": 0.2794, + "step": 370 + }, + { + "epoch": 0.5778816199376947, + "grad_norm": 0.5841003451023182, + "learning_rate": 8.077664654268883e-06, + "loss": 0.2973, + "step": 371 + }, + { + "epoch": 0.5794392523364486, + "grad_norm": 0.6441301027816297, + "learning_rate": 8.06801400216597e-06, + "loss": 0.2477, + "step": 372 + }, + { + "epoch": 0.5809968847352025, + "grad_norm": 0.6922555735413835, + "learning_rate": 8.058344983545486e-06, + "loss": 0.2524, + "step": 373 + }, + { + "epoch": 0.5825545171339563, + "grad_norm": 0.6083842427437397, + "learning_rate": 8.048657656290545e-06, + "loss": 0.2837, + "step": 374 + }, + { + "epoch": 0.5841121495327103, + "grad_norm": 0.5803164555571954, + "learning_rate": 8.03895207839386e-06, + "loss": 0.3084, + "step": 375 + }, + { + "epoch": 0.5856697819314641, + "grad_norm": 0.8102916914102428, + "learning_rate": 8.029228307957408e-06, + "loss": 0.2462, + "step": 376 + }, + { + "epoch": 0.5872274143302181, + "grad_norm": 0.7061482090328073, + "learning_rate": 8.019486403192069e-06, + "loss": 0.2487, + "step": 377 + }, + { + "epoch": 0.5887850467289719, + "grad_norm": 0.5113763283575798, + "learning_rate": 8.009726422417286e-06, + "loss": 0.2931, + "step": 378 + }, + { + "epoch": 0.5903426791277259, + "grad_norm": 0.6897001371312232, + "learning_rate": 7.99994842406071e-06, + "loss": 0.2186, + "step": 379 + }, 
+ { + "epoch": 0.5919003115264797, + "grad_norm": 0.9072221405351328, + "learning_rate": 7.99015246665786e-06, + "loss": 0.8348, + "step": 380 + }, + { + "epoch": 0.5934579439252337, + "grad_norm": 0.6531934878590409, + "learning_rate": 7.980338608851756e-06, + "loss": 0.2228, + "step": 381 + }, + { + "epoch": 0.5950155763239875, + "grad_norm": 0.7327462885118111, + "learning_rate": 7.970506909392588e-06, + "loss": 0.2433, + "step": 382 + }, + { + "epoch": 0.5965732087227414, + "grad_norm": 1.1643513095965004, + "learning_rate": 7.960657427137347e-06, + "loss": 0.3147, + "step": 383 + }, + { + "epoch": 0.5981308411214953, + "grad_norm": 0.6452843748972524, + "learning_rate": 7.950790221049485e-06, + "loss": 0.3303, + "step": 384 + }, + { + "epoch": 0.5996884735202492, + "grad_norm": 0.7274077390061954, + "learning_rate": 7.940905350198553e-06, + "loss": 0.3347, + "step": 385 + }, + { + "epoch": 0.6012461059190031, + "grad_norm": 0.6178112722471735, + "learning_rate": 7.931002873759852e-06, + "loss": 0.2276, + "step": 386 + }, + { + "epoch": 0.602803738317757, + "grad_norm": 0.7461711392201733, + "learning_rate": 7.921082851014079e-06, + "loss": 0.2634, + "step": 387 + }, + { + "epoch": 0.6043613707165109, + "grad_norm": 0.8795743223985983, + "learning_rate": 7.911145341346972e-06, + "loss": 0.2896, + "step": 388 + }, + { + "epoch": 0.6059190031152648, + "grad_norm": 0.8319649658997385, + "learning_rate": 7.901190404248952e-06, + "loss": 0.7066, + "step": 389 + }, + { + "epoch": 0.6074766355140186, + "grad_norm": 0.5267541152010679, + "learning_rate": 7.89121809931477e-06, + "loss": 0.2416, + "step": 390 + }, + { + "epoch": 0.6090342679127726, + "grad_norm": 0.46625213633043144, + "learning_rate": 7.881228486243144e-06, + "loss": 0.2482, + "step": 391 + }, + { + "epoch": 0.6105919003115264, + "grad_norm": 0.6924717742437231, + "learning_rate": 7.871221624836414e-06, + "loss": 0.3523, + "step": 392 + }, + { + "epoch": 0.6121495327102804, + "grad_norm": 0.5838406566254681, + "learning_rate": 7.861197575000168e-06, + "loss": 0.228, + "step": 393 + }, + { + "epoch": 0.6137071651090342, + "grad_norm": 0.8869116056189287, + "learning_rate": 7.8511563967429e-06, + "loss": 0.2496, + "step": 394 + }, + { + "epoch": 0.6152647975077882, + "grad_norm": 0.45593356473166835, + "learning_rate": 7.841098150175636e-06, + "loss": 0.2643, + "step": 395 + }, + { + "epoch": 0.616822429906542, + "grad_norm": 0.8435131653971922, + "learning_rate": 7.831022895511586e-06, + "loss": 0.2496, + "step": 396 + }, + { + "epoch": 0.618380062305296, + "grad_norm": 0.4987004271913618, + "learning_rate": 7.820930693065771e-06, + "loss": 0.2063, + "step": 397 + }, + { + "epoch": 0.6199376947040498, + "grad_norm": 0.7942215586211049, + "learning_rate": 7.810821603254677e-06, + "loss": 0.2181, + "step": 398 + }, + { + "epoch": 0.6214953271028038, + "grad_norm": 0.6794367411830937, + "learning_rate": 7.800695686595879e-06, + "loss": 0.3009, + "step": 399 + }, + { + "epoch": 0.6230529595015576, + "grad_norm": 0.7273130133089402, + "learning_rate": 7.790553003707691e-06, + "loss": 0.2469, + "step": 400 + }, + { + "epoch": 0.6246105919003115, + "grad_norm": 0.6902330948327355, + "learning_rate": 7.780393615308787e-06, + "loss": 0.2508, + "step": 401 + }, + { + "epoch": 0.6261682242990654, + "grad_norm": 0.7084362576430354, + "learning_rate": 7.770217582217863e-06, + "loss": 0.2551, + "step": 402 + }, + { + "epoch": 0.6277258566978193, + "grad_norm": 0.8029205384416798, + "learning_rate": 7.760024965353246e-06, + "loss": 0.2333, + 
"step": 403 + }, + { + "epoch": 0.6292834890965732, + "grad_norm": 0.6112029983652504, + "learning_rate": 7.749815825732543e-06, + "loss": 0.298, + "step": 404 + }, + { + "epoch": 0.6308411214953271, + "grad_norm": 0.7494581341489577, + "learning_rate": 7.739590224472275e-06, + "loss": 0.3462, + "step": 405 + }, + { + "epoch": 0.632398753894081, + "grad_norm": 0.7347669711126691, + "learning_rate": 7.729348222787514e-06, + "loss": 0.3149, + "step": 406 + }, + { + "epoch": 0.6339563862928349, + "grad_norm": 0.6796064286407987, + "learning_rate": 7.719089881991503e-06, + "loss": 0.2873, + "step": 407 + }, + { + "epoch": 0.6355140186915887, + "grad_norm": 0.7425509324765857, + "learning_rate": 7.708815263495307e-06, + "loss": 0.3278, + "step": 408 + }, + { + "epoch": 0.6370716510903427, + "grad_norm": 0.609414275478013, + "learning_rate": 7.698524428807431e-06, + "loss": 0.2708, + "step": 409 + }, + { + "epoch": 0.6386292834890965, + "grad_norm": 0.7757117977400942, + "learning_rate": 7.68821743953346e-06, + "loss": 0.2555, + "step": 410 + }, + { + "epoch": 0.6401869158878505, + "grad_norm": 0.6642687790623766, + "learning_rate": 7.677894357375689e-06, + "loss": 0.3625, + "step": 411 + }, + { + "epoch": 0.6417445482866043, + "grad_norm": 0.5791966784356082, + "learning_rate": 7.667555244132749e-06, + "loss": 0.2661, + "step": 412 + }, + { + "epoch": 0.6433021806853583, + "grad_norm": 0.5594732951892226, + "learning_rate": 7.65720016169924e-06, + "loss": 0.2995, + "step": 413 + }, + { + "epoch": 0.6448598130841121, + "grad_norm": 0.6021900759219545, + "learning_rate": 7.646829172065367e-06, + "loss": 0.3099, + "step": 414 + }, + { + "epoch": 0.6464174454828661, + "grad_norm": 0.5562483872284556, + "learning_rate": 7.636442337316555e-06, + "loss": 0.2376, + "step": 415 + }, + { + "epoch": 0.6479750778816199, + "grad_norm": 0.5829741964791303, + "learning_rate": 7.6260397196330895e-06, + "loss": 0.2774, + "step": 416 + }, + { + "epoch": 0.6495327102803738, + "grad_norm": 0.7958468559537486, + "learning_rate": 7.615621381289737e-06, + "loss": 0.2316, + "step": 417 + }, + { + "epoch": 0.6510903426791277, + "grad_norm": 0.6088648202059304, + "learning_rate": 7.6051873846553795e-06, + "loss": 0.33, + "step": 418 + }, + { + "epoch": 0.6526479750778816, + "grad_norm": 0.523142097250351, + "learning_rate": 7.594737792192629e-06, + "loss": 0.2589, + "step": 419 + }, + { + "epoch": 0.6542056074766355, + "grad_norm": 0.6233483869502079, + "learning_rate": 7.584272666457471e-06, + "loss": 0.2409, + "step": 420 + }, + { + "epoch": 0.6557632398753894, + "grad_norm": 0.8831745439168878, + "learning_rate": 7.573792070098873e-06, + "loss": 0.3156, + "step": 421 + }, + { + "epoch": 0.6573208722741433, + "grad_norm": 0.646939395915981, + "learning_rate": 7.5632960658584184e-06, + "loss": 0.1882, + "step": 422 + }, + { + "epoch": 0.6588785046728972, + "grad_norm": 0.7493677482681486, + "learning_rate": 7.5527847165699295e-06, + "loss": 0.2533, + "step": 423 + }, + { + "epoch": 0.660436137071651, + "grad_norm": 0.6895089900125264, + "learning_rate": 7.542258085159091e-06, + "loss": 0.2239, + "step": 424 + }, + { + "epoch": 0.661993769470405, + "grad_norm": 0.8218899831192643, + "learning_rate": 7.531716234643071e-06, + "loss": 0.3025, + "step": 425 + }, + { + "epoch": 0.6635514018691588, + "grad_norm": 0.8511691627825192, + "learning_rate": 7.5211592281301525e-06, + "loss": 0.2081, + "step": 426 + }, + { + "epoch": 0.6651090342679128, + "grad_norm": 0.6202374314769092, + "learning_rate": 7.510587128819341e-06, + 
"loss": 0.2159, + "step": 427 + }, + { + "epoch": 0.6666666666666666, + "grad_norm": 0.8637856332283039, + "learning_rate": 7.500000000000001e-06, + "loss": 0.6963, + "step": 428 + }, + { + "epoch": 0.6682242990654206, + "grad_norm": 0.7070722564579784, + "learning_rate": 7.489397905051465e-06, + "loss": 0.3265, + "step": 429 + }, + { + "epoch": 0.6697819314641744, + "grad_norm": 1.0912368137134154, + "learning_rate": 7.478780907442665e-06, + "loss": 0.3064, + "step": 430 + }, + { + "epoch": 0.6713395638629284, + "grad_norm": 0.4993170737898787, + "learning_rate": 7.468149070731742e-06, + "loss": 0.3532, + "step": 431 + }, + { + "epoch": 0.6728971962616822, + "grad_norm": 0.6200661171236782, + "learning_rate": 7.457502458565673e-06, + "loss": 0.2325, + "step": 432 + }, + { + "epoch": 0.6744548286604362, + "grad_norm": 0.6152606478186087, + "learning_rate": 7.446841134679888e-06, + "loss": 0.2538, + "step": 433 + }, + { + "epoch": 0.67601246105919, + "grad_norm": 0.5515844149625706, + "learning_rate": 7.436165162897886e-06, + "loss": 0.2619, + "step": 434 + }, + { + "epoch": 0.677570093457944, + "grad_norm": 0.7008609334925875, + "learning_rate": 7.425474607130858e-06, + "loss": 0.3168, + "step": 435 + }, + { + "epoch": 0.6791277258566978, + "grad_norm": 0.6379535213002501, + "learning_rate": 7.414769531377298e-06, + "loss": 0.268, + "step": 436 + }, + { + "epoch": 0.6806853582554517, + "grad_norm": 0.5317208819493666, + "learning_rate": 7.4040499997226245e-06, + "loss": 0.2193, + "step": 437 + }, + { + "epoch": 0.6822429906542056, + "grad_norm": 0.7290549365391932, + "learning_rate": 7.393316076338798e-06, + "loss": 0.3694, + "step": 438 + }, + { + "epoch": 0.6838006230529595, + "grad_norm": 0.8546888899097251, + "learning_rate": 7.382567825483929e-06, + "loss": 0.2822, + "step": 439 + }, + { + "epoch": 0.6853582554517134, + "grad_norm": 0.7390434139959143, + "learning_rate": 7.371805311501905e-06, + "loss": 0.24, + "step": 440 + }, + { + "epoch": 0.6869158878504673, + "grad_norm": 0.7021761813392882, + "learning_rate": 7.361028598821993e-06, + "loss": 0.3065, + "step": 441 + }, + { + "epoch": 0.6884735202492211, + "grad_norm": 0.5340954968447894, + "learning_rate": 7.350237751958466e-06, + "loss": 0.221, + "step": 442 + }, + { + "epoch": 0.6900311526479751, + "grad_norm": 0.5761626294301733, + "learning_rate": 7.339432835510203e-06, + "loss": 0.2345, + "step": 443 + }, + { + "epoch": 0.6915887850467289, + "grad_norm": 0.662018391928594, + "learning_rate": 7.328613914160319e-06, + "loss": 0.3171, + "step": 444 + }, + { + "epoch": 0.6931464174454829, + "grad_norm": 0.8130781056088618, + "learning_rate": 7.3177810526757594e-06, + "loss": 0.2909, + "step": 445 + }, + { + "epoch": 0.6947040498442367, + "grad_norm": 0.7219882547975953, + "learning_rate": 7.3069343159069296e-06, + "loss": 0.2481, + "step": 446 + }, + { + "epoch": 0.6962616822429907, + "grad_norm": 0.6369674341462834, + "learning_rate": 7.296073768787293e-06, + "loss": 0.3649, + "step": 447 + }, + { + "epoch": 0.6978193146417445, + "grad_norm": 0.7223244796104977, + "learning_rate": 7.285199476332991e-06, + "loss": 0.3488, + "step": 448 + }, + { + "epoch": 0.6993769470404985, + "grad_norm": 0.9091117254585579, + "learning_rate": 7.27431150364245e-06, + "loss": 0.3168, + "step": 449 + }, + { + "epoch": 0.7009345794392523, + "grad_norm": 0.7868966044967969, + "learning_rate": 7.263409915895992e-06, + "loss": 0.259, + "step": 450 + }, + { + "epoch": 0.7024922118380063, + "grad_norm": 0.6563385278402535, + "learning_rate": 
7.252494778355444e-06, + "loss": 0.25, + "step": 451 + }, + { + "epoch": 0.7040498442367601, + "grad_norm": 0.641132207138942, + "learning_rate": 7.2415661563637506e-06, + "loss": 0.3307, + "step": 452 + }, + { + "epoch": 0.705607476635514, + "grad_norm": 0.7073578438725788, + "learning_rate": 7.23062411534458e-06, + "loss": 0.2261, + "step": 453 + }, + { + "epoch": 0.7071651090342679, + "grad_norm": 1.040324988179143, + "learning_rate": 7.2196687208019315e-06, + "loss": 0.2057, + "step": 454 + }, + { + "epoch": 0.7087227414330218, + "grad_norm": 0.6121914696936145, + "learning_rate": 7.208700038319744e-06, + "loss": 0.3199, + "step": 455 + }, + { + "epoch": 0.7102803738317757, + "grad_norm": 0.48700575482675645, + "learning_rate": 7.1977181335615085e-06, + "loss": 0.2259, + "step": 456 + }, + { + "epoch": 0.7118380062305296, + "grad_norm": 0.545525370035186, + "learning_rate": 7.186723072269863e-06, + "loss": 0.268, + "step": 457 + }, + { + "epoch": 0.7133956386292835, + "grad_norm": 0.846012722177333, + "learning_rate": 7.175714920266214e-06, + "loss": 0.7256, + "step": 458 + }, + { + "epoch": 0.7149532710280374, + "grad_norm": 0.5989442738821008, + "learning_rate": 7.164693743450329e-06, + "loss": 0.3005, + "step": 459 + }, + { + "epoch": 0.7165109034267912, + "grad_norm": 0.6556232944526054, + "learning_rate": 7.153659607799952e-06, + "loss": 0.2745, + "step": 460 + }, + { + "epoch": 0.7180685358255452, + "grad_norm": 0.6984028449665124, + "learning_rate": 7.142612579370402e-06, + "loss": 0.2272, + "step": 461 + }, + { + "epoch": 0.719626168224299, + "grad_norm": 0.5324547293774875, + "learning_rate": 7.131552724294181e-06, + "loss": 0.2518, + "step": 462 + }, + { + "epoch": 0.721183800623053, + "grad_norm": 0.5943008852162496, + "learning_rate": 7.1204801087805765e-06, + "loss": 0.2663, + "step": 463 + }, + { + "epoch": 0.7227414330218068, + "grad_norm": 0.5954388212811877, + "learning_rate": 7.109394799115268e-06, + "loss": 0.25, + "step": 464 + }, + { + "epoch": 0.7242990654205608, + "grad_norm": 0.5237590199785461, + "learning_rate": 7.098296861659925e-06, + "loss": 0.2451, + "step": 465 + }, + { + "epoch": 0.7258566978193146, + "grad_norm": 0.6108790141608955, + "learning_rate": 7.0871863628518136e-06, + "loss": 0.2782, + "step": 466 + }, + { + "epoch": 0.7274143302180686, + "grad_norm": 0.5784066614984076, + "learning_rate": 7.0760633692033975e-06, + "loss": 0.2588, + "step": 467 + }, + { + "epoch": 0.7289719626168224, + "grad_norm": 0.5736026805586273, + "learning_rate": 7.064927947301942e-06, + "loss": 0.3319, + "step": 468 + }, + { + "epoch": 0.7305295950155763, + "grad_norm": 0.660786534496975, + "learning_rate": 7.0537801638091116e-06, + "loss": 0.3207, + "step": 469 + }, + { + "epoch": 0.7320872274143302, + "grad_norm": 0.6164260678789174, + "learning_rate": 7.042620085460574e-06, + "loss": 0.2759, + "step": 470 + }, + { + "epoch": 0.7336448598130841, + "grad_norm": 1.0298833742062845, + "learning_rate": 7.0314477790656e-06, + "loss": 0.2769, + "step": 471 + }, + { + "epoch": 0.735202492211838, + "grad_norm": 0.7848249743313419, + "learning_rate": 7.020263311506659e-06, + "loss": 0.3963, + "step": 472 + }, + { + "epoch": 0.7367601246105919, + "grad_norm": 0.5488287365596327, + "learning_rate": 7.009066749739026e-06, + "loss": 0.2244, + "step": 473 + }, + { + "epoch": 0.7383177570093458, + "grad_norm": 0.5479634675942974, + "learning_rate": 6.99785816079038e-06, + "loss": 0.277, + "step": 474 + }, + { + "epoch": 0.7398753894080997, + "grad_norm": 0.632641842156797, + 
"learning_rate": 6.986637611760394e-06, + "loss": 0.2948, + "step": 475 + }, + { + "epoch": 0.7414330218068536, + "grad_norm": 0.8957280703439034, + "learning_rate": 6.975405169820344e-06, + "loss": 0.353, + "step": 476 + }, + { + "epoch": 0.7429906542056075, + "grad_norm": 0.575855558736389, + "learning_rate": 6.9641609022127e-06, + "loss": 0.2667, + "step": 477 + }, + { + "epoch": 0.7445482866043613, + "grad_norm": 0.6675031465700932, + "learning_rate": 6.952904876250729e-06, + "loss": 0.239, + "step": 478 + }, + { + "epoch": 0.7461059190031153, + "grad_norm": 0.5488652674770181, + "learning_rate": 6.941637159318083e-06, + "loss": 0.2605, + "step": 479 + }, + { + "epoch": 0.7476635514018691, + "grad_norm": 0.765674305969199, + "learning_rate": 6.9303578188684085e-06, + "loss": 0.2668, + "step": 480 + }, + { + "epoch": 0.7492211838006231, + "grad_norm": 0.6239515815918181, + "learning_rate": 6.919066922424931e-06, + "loss": 0.2883, + "step": 481 + }, + { + "epoch": 0.7507788161993769, + "grad_norm": 0.8869720139234101, + "learning_rate": 6.907764537580053e-06, + "loss": 0.2726, + "step": 482 + }, + { + "epoch": 0.7523364485981309, + "grad_norm": 0.7182796918869947, + "learning_rate": 6.896450731994959e-06, + "loss": 0.2575, + "step": 483 + }, + { + "epoch": 0.7538940809968847, + "grad_norm": 0.689714104473123, + "learning_rate": 6.8851255733992006e-06, + "loss": 0.2548, + "step": 484 + }, + { + "epoch": 0.7554517133956387, + "grad_norm": 0.8752253075858156, + "learning_rate": 6.873789129590287e-06, + "loss": 0.2598, + "step": 485 + }, + { + "epoch": 0.7570093457943925, + "grad_norm": 0.6547980788626615, + "learning_rate": 6.862441468433298e-06, + "loss": 0.274, + "step": 486 + }, + { + "epoch": 0.7585669781931464, + "grad_norm": 0.6955009265885427, + "learning_rate": 6.851082657860453e-06, + "loss": 0.286, + "step": 487 + }, + { + "epoch": 0.7601246105919003, + "grad_norm": 0.6057981135550708, + "learning_rate": 6.839712765870725e-06, + "loss": 0.3072, + "step": 488 + }, + { + "epoch": 0.7616822429906542, + "grad_norm": 0.5562050274960125, + "learning_rate": 6.828331860529422e-06, + "loss": 0.2765, + "step": 489 + }, + { + "epoch": 0.7632398753894081, + "grad_norm": 0.9242326126038012, + "learning_rate": 6.816940009967787e-06, + "loss": 0.8322, + "step": 490 + }, + { + "epoch": 0.764797507788162, + "grad_norm": 0.7207873437414208, + "learning_rate": 6.805537282382581e-06, + "loss": 0.2175, + "step": 491 + }, + { + "epoch": 0.7663551401869159, + "grad_norm": 0.5928431496932391, + "learning_rate": 6.79412374603568e-06, + "loss": 0.24, + "step": 492 + }, + { + "epoch": 0.7679127725856698, + "grad_norm": 0.8086943486132299, + "learning_rate": 6.782699469253671e-06, + "loss": 0.3252, + "step": 493 + }, + { + "epoch": 0.7694704049844237, + "grad_norm": 0.6500702055304157, + "learning_rate": 6.771264520427432e-06, + "loss": 0.2831, + "step": 494 + }, + { + "epoch": 0.7710280373831776, + "grad_norm": 0.5699110226071109, + "learning_rate": 6.759818968011731e-06, + "loss": 0.2604, + "step": 495 + }, + { + "epoch": 0.7725856697819314, + "grad_norm": 0.5580360203832775, + "learning_rate": 6.748362880524819e-06, + "loss": 0.2684, + "step": 496 + }, + { + "epoch": 0.7741433021806854, + "grad_norm": 0.7886971673525824, + "learning_rate": 6.736896326548006e-06, + "loss": 0.2123, + "step": 497 + }, + { + "epoch": 0.7757009345794392, + "grad_norm": 0.6054794222872896, + "learning_rate": 6.7254193747252645e-06, + "loss": 0.3127, + "step": 498 + }, + { + "epoch": 0.7772585669781932, + "grad_norm": 
0.7462264966697667, + "learning_rate": 6.713932093762811e-06, + "loss": 0.3051, + "step": 499 + }, + { + "epoch": 0.778816199376947, + "grad_norm": 0.6730107352048917, + "learning_rate": 6.702434552428702e-06, + "loss": 0.3007, + "step": 500 + }, + { + "epoch": 0.778816199376947, + "eval_loss": 0.366039514541626, + "eval_runtime": 2.8278, + "eval_samples_per_second": 9.194, + "eval_steps_per_second": 2.475, + "step": 500 + }, + { + "epoch": 0.780373831775701, + "grad_norm": 0.6861562261314369, + "learning_rate": 6.690926819552408e-06, + "loss": 0.287, + "step": 501 + }, + { + "epoch": 0.7819314641744548, + "grad_norm": 0.9308842751027873, + "learning_rate": 6.679408964024419e-06, + "loss": 0.8811, + "step": 502 + }, + { + "epoch": 0.7834890965732088, + "grad_norm": 0.6729340583401545, + "learning_rate": 6.667881054795818e-06, + "loss": 0.2304, + "step": 503 + }, + { + "epoch": 0.7850467289719626, + "grad_norm": 0.7848693380567189, + "learning_rate": 6.65634316087788e-06, + "loss": 0.2965, + "step": 504 + }, + { + "epoch": 0.7866043613707165, + "grad_norm": 0.7035209610164758, + "learning_rate": 6.6447953513416474e-06, + "loss": 0.2589, + "step": 505 + }, + { + "epoch": 0.7881619937694704, + "grad_norm": 0.5912497530045528, + "learning_rate": 6.633237695317523e-06, + "loss": 0.2566, + "step": 506 + }, + { + "epoch": 0.7897196261682243, + "grad_norm": 0.7247030582803601, + "learning_rate": 6.621670261994857e-06, + "loss": 0.2726, + "step": 507 + }, + { + "epoch": 0.7912772585669782, + "grad_norm": 0.6656199289111854, + "learning_rate": 6.610093120621532e-06, + "loss": 0.2999, + "step": 508 + }, + { + "epoch": 0.7928348909657321, + "grad_norm": 0.8158049430019846, + "learning_rate": 6.598506340503541e-06, + "loss": 0.2453, + "step": 509 + }, + { + "epoch": 0.794392523364486, + "grad_norm": 0.6984808487227331, + "learning_rate": 6.586909991004587e-06, + "loss": 0.3149, + "step": 510 + }, + { + "epoch": 0.7959501557632399, + "grad_norm": 0.6126995511808185, + "learning_rate": 6.575304141545653e-06, + "loss": 0.2666, + "step": 511 + }, + { + "epoch": 0.7975077881619937, + "grad_norm": 0.5505067863178127, + "learning_rate": 6.5636888616046e-06, + "loss": 0.2998, + "step": 512 + }, + { + "epoch": 0.7990654205607477, + "grad_norm": 0.7685023837309144, + "learning_rate": 6.552064220715737e-06, + "loss": 0.1876, + "step": 513 + }, + { + "epoch": 0.8006230529595015, + "grad_norm": 0.6186176349687995, + "learning_rate": 6.5404302884694145e-06, + "loss": 0.2823, + "step": 514 + }, + { + "epoch": 0.8021806853582555, + "grad_norm": 0.6245560858038731, + "learning_rate": 6.528787134511608e-06, + "loss": 0.2063, + "step": 515 + }, + { + "epoch": 0.8037383177570093, + "grad_norm": 0.6027333943405707, + "learning_rate": 6.5171348285434965e-06, + "loss": 0.3079, + "step": 516 + }, + { + "epoch": 0.8052959501557633, + "grad_norm": 0.7334879584524152, + "learning_rate": 6.505473440321044e-06, + "loss": 0.2906, + "step": 517 + }, + { + "epoch": 0.8068535825545171, + "grad_norm": 0.5611150628723894, + "learning_rate": 6.493803039654589e-06, + "loss": 0.2437, + "step": 518 + }, + { + "epoch": 0.8084112149532711, + "grad_norm": 0.5709098533110826, + "learning_rate": 6.48212369640842e-06, + "loss": 0.2695, + "step": 519 + }, + { + "epoch": 0.8099688473520249, + "grad_norm": 0.7469745364074795, + "learning_rate": 6.4704354805003626e-06, + "loss": 0.2828, + "step": 520 + }, + { + "epoch": 0.8115264797507789, + "grad_norm": 0.7580041865120294, + "learning_rate": 6.458738461901354e-06, + "loss": 0.2456, + "step": 521 + 
}, + { + "epoch": 0.8130841121495327, + "grad_norm": 0.7356278600835281, + "learning_rate": 6.447032710635035e-06, + "loss": 0.2325, + "step": 522 + }, + { + "epoch": 0.8146417445482866, + "grad_norm": 0.5694798902062997, + "learning_rate": 6.435318296777316e-06, + "loss": 0.2763, + "step": 523 + }, + { + "epoch": 0.8161993769470405, + "grad_norm": 0.6227419240061058, + "learning_rate": 6.423595290455971e-06, + "loss": 0.2871, + "step": 524 + }, + { + "epoch": 0.8177570093457944, + "grad_norm": 0.7005734890264759, + "learning_rate": 6.41186376185021e-06, + "loss": 0.3003, + "step": 525 + }, + { + "epoch": 0.8193146417445483, + "grad_norm": 0.8909957470129115, + "learning_rate": 6.400123781190265e-06, + "loss": 0.3328, + "step": 526 + }, + { + "epoch": 0.8208722741433022, + "grad_norm": 0.5534613081412874, + "learning_rate": 6.388375418756959e-06, + "loss": 0.2816, + "step": 527 + }, + { + "epoch": 0.822429906542056, + "grad_norm": 0.7546735553270361, + "learning_rate": 6.3766187448813e-06, + "loss": 0.249, + "step": 528 + }, + { + "epoch": 0.82398753894081, + "grad_norm": 1.08724358600162, + "learning_rate": 6.3648538299440444e-06, + "loss": 0.2978, + "step": 529 + }, + { + "epoch": 0.8255451713395638, + "grad_norm": 0.5510668935985298, + "learning_rate": 6.35308074437529e-06, + "loss": 0.2586, + "step": 530 + }, + { + "epoch": 0.8271028037383178, + "grad_norm": 0.9295747471096518, + "learning_rate": 6.341299558654042e-06, + "loss": 0.4423, + "step": 531 + }, + { + "epoch": 0.8286604361370716, + "grad_norm": 0.6078601432381958, + "learning_rate": 6.329510343307801e-06, + "loss": 0.3089, + "step": 532 + }, + { + "epoch": 0.8302180685358256, + "grad_norm": 0.7009464939112006, + "learning_rate": 6.3177131689121325e-06, + "loss": 0.276, + "step": 533 + }, + { + "epoch": 0.8317757009345794, + "grad_norm": 0.6055023924718476, + "learning_rate": 6.305908106090255e-06, + "loss": 0.289, + "step": 534 + }, + { + "epoch": 0.8333333333333334, + "grad_norm": 1.0146910918408667, + "learning_rate": 6.294095225512604e-06, + "loss": 0.2117, + "step": 535 + }, + { + "epoch": 0.8348909657320872, + "grad_norm": 0.5684609592585866, + "learning_rate": 6.282274597896421e-06, + "loss": 0.268, + "step": 536 + }, + { + "epoch": 0.8364485981308412, + "grad_norm": 0.6324240721524141, + "learning_rate": 6.2704462940053165e-06, + "loss": 0.2348, + "step": 537 + }, + { + "epoch": 0.838006230529595, + "grad_norm": 0.582281483203043, + "learning_rate": 6.2586103846488654e-06, + "loss": 0.2975, + "step": 538 + }, + { + "epoch": 0.839563862928349, + "grad_norm": 0.9776397911217686, + "learning_rate": 6.246766940682165e-06, + "loss": 0.7799, + "step": 539 + }, + { + "epoch": 0.8411214953271028, + "grad_norm": 0.5174311636719064, + "learning_rate": 6.234916033005421e-06, + "loss": 0.1973, + "step": 540 + }, + { + "epoch": 0.8426791277258567, + "grad_norm": 0.6331348424871293, + "learning_rate": 6.22305773256352e-06, + "loss": 0.2749, + "step": 541 + }, + { + "epoch": 0.8442367601246106, + "grad_norm": 0.5229024799327089, + "learning_rate": 6.211192110345603e-06, + "loss": 0.2811, + "step": 542 + }, + { + "epoch": 0.8457943925233645, + "grad_norm": 0.5575336291274628, + "learning_rate": 6.199319237384645e-06, + "loss": 0.2534, + "step": 543 + }, + { + "epoch": 0.8473520249221184, + "grad_norm": 0.7289277957152529, + "learning_rate": 6.187439184757025e-06, + "loss": 0.7677, + "step": 544 + }, + { + "epoch": 0.8489096573208723, + "grad_norm": 0.540862000313681, + "learning_rate": 6.1755520235821055e-06, + "loss": 0.3294, + 
"step": 545 + }, + { + "epoch": 0.8504672897196262, + "grad_norm": 0.7035928510596402, + "learning_rate": 6.163657825021802e-06, + "loss": 0.3147, + "step": 546 + }, + { + "epoch": 0.8520249221183801, + "grad_norm": 0.5438019081147566, + "learning_rate": 6.1517566602801596e-06, + "loss": 0.2003, + "step": 547 + }, + { + "epoch": 0.8535825545171339, + "grad_norm": 0.8025432437697821, + "learning_rate": 6.139848600602926e-06, + "loss": 0.2756, + "step": 548 + }, + { + "epoch": 0.8551401869158879, + "grad_norm": 0.654724311621637, + "learning_rate": 6.127933717277123e-06, + "loss": 0.2934, + "step": 549 + }, + { + "epoch": 0.8566978193146417, + "grad_norm": 1.328494349119985, + "learning_rate": 6.116012081630629e-06, + "loss": 0.2731, + "step": 550 + }, + { + "epoch": 0.8582554517133957, + "grad_norm": 0.5435085655801555, + "learning_rate": 6.104083765031734e-06, + "loss": 0.1934, + "step": 551 + }, + { + "epoch": 0.8598130841121495, + "grad_norm": 0.7875228878328122, + "learning_rate": 6.0921488388887315e-06, + "loss": 0.7651, + "step": 552 + }, + { + "epoch": 0.8613707165109035, + "grad_norm": 0.9138141754922854, + "learning_rate": 6.080207374649482e-06, + "loss": 0.6927, + "step": 553 + }, + { + "epoch": 0.8629283489096573, + "grad_norm": 0.658537276564411, + "learning_rate": 6.068259443800981e-06, + "loss": 0.3088, + "step": 554 + }, + { + "epoch": 0.8644859813084113, + "grad_norm": 0.8424646536545584, + "learning_rate": 6.0563051178689395e-06, + "loss": 0.6504, + "step": 555 + }, + { + "epoch": 0.8660436137071651, + "grad_norm": 0.5487578856405264, + "learning_rate": 6.0443444684173524e-06, + "loss": 0.2504, + "step": 556 + }, + { + "epoch": 0.867601246105919, + "grad_norm": 0.577742497246078, + "learning_rate": 6.032377567048071e-06, + "loss": 0.2724, + "step": 557 + }, + { + "epoch": 0.8691588785046729, + "grad_norm": 0.5720861179090082, + "learning_rate": 6.0204044854003705e-06, + "loss": 0.2494, + "step": 558 + }, + { + "epoch": 0.8707165109034268, + "grad_norm": 0.614579567677496, + "learning_rate": 6.008425295150526e-06, + "loss": 0.2431, + "step": 559 + }, + { + "epoch": 0.8722741433021807, + "grad_norm": 0.7053311054530548, + "learning_rate": 5.996440068011383e-06, + "loss": 0.3007, + "step": 560 + }, + { + "epoch": 0.8738317757009346, + "grad_norm": 0.6676390464189279, + "learning_rate": 5.9844488757319205e-06, + "loss": 0.2309, + "step": 561 + }, + { + "epoch": 0.8753894080996885, + "grad_norm": 0.7749136512043995, + "learning_rate": 5.972451790096837e-06, + "loss": 0.3327, + "step": 562 + }, + { + "epoch": 0.8769470404984424, + "grad_norm": 0.6384638289071073, + "learning_rate": 5.960448882926101e-06, + "loss": 0.3447, + "step": 563 + }, + { + "epoch": 0.8785046728971962, + "grad_norm": 0.5992691951009588, + "learning_rate": 5.948440226074539e-06, + "loss": 0.2181, + "step": 564 + }, + { + "epoch": 0.8800623052959502, + "grad_norm": 0.7553595588283479, + "learning_rate": 5.936425891431394e-06, + "loss": 0.2307, + "step": 565 + }, + { + "epoch": 0.881619937694704, + "grad_norm": 0.7852167594459105, + "learning_rate": 5.924405950919902e-06, + "loss": 0.3119, + "step": 566 + }, + { + "epoch": 0.883177570093458, + "grad_norm": 0.5241767306732831, + "learning_rate": 5.91238047649685e-06, + "loss": 0.2293, + "step": 567 + }, + { + "epoch": 0.8847352024922118, + "grad_norm": 0.7471757161407457, + "learning_rate": 5.900349540152167e-06, + "loss": 0.3251, + "step": 568 + }, + { + "epoch": 0.8862928348909658, + "grad_norm": 0.7843562558921093, + "learning_rate": 5.888313213908468e-06, + 
"loss": 0.2868, + "step": 569 + }, + { + "epoch": 0.8878504672897196, + "grad_norm": 0.7503757117692468, + "learning_rate": 5.876271569820638e-06, + "loss": 0.2555, + "step": 570 + }, + { + "epoch": 0.8894080996884736, + "grad_norm": 0.7275473381141497, + "learning_rate": 5.864224679975399e-06, + "loss": 0.2945, + "step": 571 + }, + { + "epoch": 0.8909657320872274, + "grad_norm": 0.7108035276261796, + "learning_rate": 5.852172616490875e-06, + "loss": 0.2826, + "step": 572 + }, + { + "epoch": 0.8925233644859814, + "grad_norm": 0.7292991034746364, + "learning_rate": 5.84011545151616e-06, + "loss": 0.2052, + "step": 573 + }, + { + "epoch": 0.8940809968847352, + "grad_norm": 0.7701552350107903, + "learning_rate": 5.828053257230893e-06, + "loss": 0.272, + "step": 574 + }, + { + "epoch": 0.8956386292834891, + "grad_norm": 0.5385532832886137, + "learning_rate": 5.815986105844813e-06, + "loss": 0.2859, + "step": 575 + }, + { + "epoch": 0.897196261682243, + "grad_norm": 0.4784130573714912, + "learning_rate": 5.803914069597342e-06, + "loss": 0.2385, + "step": 576 + }, + { + "epoch": 0.8987538940809969, + "grad_norm": 0.6661872649899948, + "learning_rate": 5.791837220757139e-06, + "loss": 0.2601, + "step": 577 + }, + { + "epoch": 0.9003115264797508, + "grad_norm": 0.6423784935357807, + "learning_rate": 5.779755631621679e-06, + "loss": 0.2861, + "step": 578 + }, + { + "epoch": 0.9018691588785047, + "grad_norm": 0.5564393840451491, + "learning_rate": 5.767669374516807e-06, + "loss": 0.2247, + "step": 579 + }, + { + "epoch": 0.9034267912772586, + "grad_norm": 1.0831258133460262, + "learning_rate": 5.755578521796321e-06, + "loss": 0.7525, + "step": 580 + }, + { + "epoch": 0.9049844236760125, + "grad_norm": 0.7513349003919912, + "learning_rate": 5.743483145841525e-06, + "loss": 0.2417, + "step": 581 + }, + { + "epoch": 0.9065420560747663, + "grad_norm": 0.6795049942946425, + "learning_rate": 5.731383319060805e-06, + "loss": 0.3177, + "step": 582 + }, + { + "epoch": 0.9080996884735203, + "grad_norm": 0.7390882203257174, + "learning_rate": 5.719279113889184e-06, + "loss": 0.3581, + "step": 583 + }, + { + "epoch": 0.9096573208722741, + "grad_norm": 0.582238701325137, + "learning_rate": 5.707170602787908e-06, + "loss": 0.2755, + "step": 584 + }, + { + "epoch": 0.9112149532710281, + "grad_norm": 0.5600730304265147, + "learning_rate": 5.695057858243989e-06, + "loss": 0.2745, + "step": 585 + }, + { + "epoch": 0.9127725856697819, + "grad_norm": 0.638261445819756, + "learning_rate": 5.682940952769788e-06, + "loss": 0.177, + "step": 586 + }, + { + "epoch": 0.9143302180685359, + "grad_norm": 0.6191376433403027, + "learning_rate": 5.670819958902576e-06, + "loss": 0.2447, + "step": 587 + }, + { + "epoch": 0.9158878504672897, + "grad_norm": 0.8780779667116613, + "learning_rate": 5.658694949204094e-06, + "loss": 0.2438, + "step": 588 + }, + { + "epoch": 0.9174454828660437, + "grad_norm": 0.7198384217051569, + "learning_rate": 5.646565996260129e-06, + "loss": 0.2408, + "step": 589 + }, + { + "epoch": 0.9190031152647975, + "grad_norm": 1.0167707545321327, + "learning_rate": 5.634433172680072e-06, + "loss": 0.7316, + "step": 590 + }, + { + "epoch": 0.9205607476635514, + "grad_norm": 0.6546640817603999, + "learning_rate": 5.622296551096481e-06, + "loss": 0.3253, + "step": 591 + }, + { + "epoch": 0.9221183800623053, + "grad_norm": 0.8382914937621136, + "learning_rate": 5.61015620416466e-06, + "loss": 0.2981, + "step": 592 + }, + { + "epoch": 0.9236760124610592, + "grad_norm": 0.6120676244562511, + "learning_rate": 
5.598012204562204e-06, + "loss": 0.2647, + "step": 593 + }, + { + "epoch": 0.9252336448598131, + "grad_norm": 0.700304920368459, + "learning_rate": 5.5858646249885855e-06, + "loss": 0.2249, + "step": 594 + }, + { + "epoch": 0.926791277258567, + "grad_norm": 0.6840607666164885, + "learning_rate": 5.573713538164698e-06, + "loss": 0.2795, + "step": 595 + }, + { + "epoch": 0.9283489096573209, + "grad_norm": 0.8513439406827232, + "learning_rate": 5.561559016832438e-06, + "loss": 0.2931, + "step": 596 + }, + { + "epoch": 0.9299065420560748, + "grad_norm": 0.7770854828276103, + "learning_rate": 5.549401133754259e-06, + "loss": 0.2819, + "step": 597 + }, + { + "epoch": 0.9314641744548287, + "grad_norm": 0.5874428152071298, + "learning_rate": 5.5372399617127415e-06, + "loss": 0.4483, + "step": 598 + }, + { + "epoch": 0.9330218068535826, + "grad_norm": 0.6488291930752155, + "learning_rate": 5.525075573510154e-06, + "loss": 0.3873, + "step": 599 + }, + { + "epoch": 0.9345794392523364, + "grad_norm": 0.5286049009581415, + "learning_rate": 5.512908041968018e-06, + "loss": 0.3113, + "step": 600 + }, + { + "epoch": 0.9361370716510904, + "grad_norm": 0.5929114341675884, + "learning_rate": 5.500737439926674e-06, + "loss": 0.334, + "step": 601 + }, + { + "epoch": 0.9376947040498442, + "grad_norm": 0.847988678139219, + "learning_rate": 5.488563840244843e-06, + "loss": 0.3026, + "step": 602 + }, + { + "epoch": 0.9392523364485982, + "grad_norm": 0.5635536494534855, + "learning_rate": 5.476387315799189e-06, + "loss": 0.2146, + "step": 603 + }, + { + "epoch": 0.940809968847352, + "grad_norm": 0.5570339586064699, + "learning_rate": 5.464207939483891e-06, + "loss": 0.2407, + "step": 604 + }, + { + "epoch": 0.942367601246106, + "grad_norm": 0.5996622051462993, + "learning_rate": 5.452025784210193e-06, + "loss": 0.2301, + "step": 605 + }, + { + "epoch": 0.9439252336448598, + "grad_norm": 0.6134433053870486, + "learning_rate": 5.439840922905982e-06, + "loss": 0.2881, + "step": 606 + }, + { + "epoch": 0.9454828660436138, + "grad_norm": 0.6534399078221188, + "learning_rate": 5.42765342851534e-06, + "loss": 0.2991, + "step": 607 + }, + { + "epoch": 0.9470404984423676, + "grad_norm": 0.690575160461211, + "learning_rate": 5.415463373998112e-06, + "loss": 0.3353, + "step": 608 + }, + { + "epoch": 0.9485981308411215, + "grad_norm": 0.8921403858615625, + "learning_rate": 5.403270832329473e-06, + "loss": 0.2008, + "step": 609 + }, + { + "epoch": 0.9501557632398754, + "grad_norm": 0.7025072704858318, + "learning_rate": 5.391075876499483e-06, + "loss": 0.2621, + "step": 610 + }, + { + "epoch": 0.9517133956386293, + "grad_norm": 0.6757376481036776, + "learning_rate": 5.3788785795126554e-06, + "loss": 0.2469, + "step": 611 + }, + { + "epoch": 0.9532710280373832, + "grad_norm": 0.6875817981934039, + "learning_rate": 5.36667901438752e-06, + "loss": 0.236, + "step": 612 + }, + { + "epoch": 0.9548286604361371, + "grad_norm": 0.7529907066389188, + "learning_rate": 5.354477254156184e-06, + "loss": 0.2755, + "step": 613 + }, + { + "epoch": 0.956386292834891, + "grad_norm": 0.5896914001022201, + "learning_rate": 5.342273371863895e-06, + "loss": 0.2634, + "step": 614 + }, + { + "epoch": 0.9579439252336449, + "grad_norm": 0.7027739875415374, + "learning_rate": 5.330067440568605e-06, + "loss": 0.2829, + "step": 615 + }, + { + "epoch": 0.9595015576323987, + "grad_norm": 0.7667507882778263, + "learning_rate": 5.317859533340532e-06, + "loss": 0.3506, + "step": 616 + }, + { + "epoch": 0.9610591900311527, + "grad_norm": 0.8456849495117152, + 
"learning_rate": 5.30564972326172e-06, + "loss": 0.3054, + "step": 617 + }, + { + "epoch": 0.9626168224299065, + "grad_norm": 0.51248054747711, + "learning_rate": 5.293438083425611e-06, + "loss": 0.2301, + "step": 618 + }, + { + "epoch": 0.9641744548286605, + "grad_norm": 0.7467447701014356, + "learning_rate": 5.281224686936594e-06, + "loss": 0.3769, + "step": 619 + }, + { + "epoch": 0.9657320872274143, + "grad_norm": 0.6414852824849853, + "learning_rate": 5.26900960690958e-06, + "loss": 0.2779, + "step": 620 + }, + { + "epoch": 0.9672897196261683, + "grad_norm": 0.6295561918474766, + "learning_rate": 5.256792916469552e-06, + "loss": 0.2586, + "step": 621 + }, + { + "epoch": 0.9688473520249221, + "grad_norm": 0.6049915483456579, + "learning_rate": 5.244574688751138e-06, + "loss": 0.2195, + "step": 622 + }, + { + "epoch": 0.9704049844236761, + "grad_norm": 1.0443726591172307, + "learning_rate": 5.23235499689817e-06, + "loss": 0.7974, + "step": 623 + }, + { + "epoch": 0.9719626168224299, + "grad_norm": 0.4947295180592083, + "learning_rate": 5.220133914063239e-06, + "loss": 0.2173, + "step": 624 + }, + { + "epoch": 0.9735202492211839, + "grad_norm": 0.774384784856486, + "learning_rate": 5.20791151340727e-06, + "loss": 0.2196, + "step": 625 + }, + { + "epoch": 0.9750778816199377, + "grad_norm": 0.6848166382044595, + "learning_rate": 5.195687868099073e-06, + "loss": 0.2784, + "step": 626 + }, + { + "epoch": 0.9766355140186916, + "grad_norm": 1.5114886266488685, + "learning_rate": 5.1834630513149086e-06, + "loss": 0.8531, + "step": 627 + }, + { + "epoch": 0.9781931464174455, + "grad_norm": 1.0107267590162416, + "learning_rate": 5.171237136238054e-06, + "loss": 0.2692, + "step": 628 + }, + { + "epoch": 0.9797507788161994, + "grad_norm": 0.7292686375997546, + "learning_rate": 5.159010196058356e-06, + "loss": 0.2305, + "step": 629 + }, + { + "epoch": 0.9813084112149533, + "grad_norm": 0.565137268345777, + "learning_rate": 5.1467823039718046e-06, + "loss": 0.3076, + "step": 630 + }, + { + "epoch": 0.9828660436137072, + "grad_norm": 1.0043606696953649, + "learning_rate": 5.134553533180082e-06, + "loss": 0.7515, + "step": 631 + }, + { + "epoch": 0.9844236760124611, + "grad_norm": 0.6233222851344706, + "learning_rate": 5.122323956890136e-06, + "loss": 0.3019, + "step": 632 + }, + { + "epoch": 0.985981308411215, + "grad_norm": 0.730841019679576, + "learning_rate": 5.110093648313732e-06, + "loss": 0.3483, + "step": 633 + }, + { + "epoch": 0.9875389408099688, + "grad_norm": 0.6632089778706831, + "learning_rate": 5.097862680667024e-06, + "loss": 0.3608, + "step": 634 + }, + { + "epoch": 0.9890965732087228, + "grad_norm": 0.6691349603071851, + "learning_rate": 5.085631127170106e-06, + "loss": 0.2524, + "step": 635 + }, + { + "epoch": 0.9906542056074766, + "grad_norm": 0.73951871814352, + "learning_rate": 5.073399061046584e-06, + "loss": 0.3134, + "step": 636 + }, + { + "epoch": 0.9922118380062306, + "grad_norm": 0.6079224831882191, + "learning_rate": 5.061166555523129e-06, + "loss": 0.308, + "step": 637 + }, + { + "epoch": 0.9937694704049844, + "grad_norm": 0.6166501321049228, + "learning_rate": 5.048933683829046e-06, + "loss": 0.3073, + "step": 638 + }, + { + "epoch": 0.9953271028037384, + "grad_norm": 1.003680827370098, + "learning_rate": 5.0367005191958275e-06, + "loss": 0.3229, + "step": 639 + }, + { + "epoch": 0.9968847352024922, + "grad_norm": 0.7517097037555416, + "learning_rate": 5.024467134856725e-06, + "loss": 0.2744, + "step": 640 + }, + { + "epoch": 0.9984423676012462, + "grad_norm": 
0.5399606823979543, + "learning_rate": 5.012233604046303e-06, + "loss": 0.247, + "step": 641 + }, + { + "epoch": 1.0, + "grad_norm": 0.602288219366613, + "learning_rate": 5e-06, + "loss": 0.2401, + "step": 642 + }, + { + "epoch": 1.0015576323987538, + "grad_norm": 0.5865025697495287, + "learning_rate": 4.987766395953699e-06, + "loss": 0.1907, + "step": 643 + }, + { + "epoch": 1.0031152647975077, + "grad_norm": 0.6271078194072086, + "learning_rate": 4.975532865143277e-06, + "loss": 0.1837, + "step": 644 + }, + { + "epoch": 1.0046728971962617, + "grad_norm": 0.5433097442594456, + "learning_rate": 4.963299480804173e-06, + "loss": 0.2122, + "step": 645 + }, + { + "epoch": 1.0062305295950156, + "grad_norm": 0.6939024837652152, + "learning_rate": 4.951066316170956e-06, + "loss": 0.2464, + "step": 646 + }, + { + "epoch": 1.0077881619937694, + "grad_norm": 0.6120591075726564, + "learning_rate": 4.938833444476873e-06, + "loss": 0.2943, + "step": 647 + }, + { + "epoch": 1.0093457943925233, + "grad_norm": 0.5279986678775014, + "learning_rate": 4.926600938953418e-06, + "loss": 0.2139, + "step": 648 + }, + { + "epoch": 1.0109034267912773, + "grad_norm": 0.5465124083851354, + "learning_rate": 4.9143688728298946e-06, + "loss": 0.2946, + "step": 649 + }, + { + "epoch": 1.0124610591900312, + "grad_norm": 0.598599797464655, + "learning_rate": 4.9021373193329775e-06, + "loss": 0.2467, + "step": 650 + }, + { + "epoch": 1.014018691588785, + "grad_norm": 0.5911973059402644, + "learning_rate": 4.889906351686269e-06, + "loss": 0.2439, + "step": 651 + }, + { + "epoch": 1.0155763239875388, + "grad_norm": 0.5007183880780492, + "learning_rate": 4.8776760431098665e-06, + "loss": 0.2448, + "step": 652 + }, + { + "epoch": 1.017133956386293, + "grad_norm": 0.6198547585957133, + "learning_rate": 4.865446466819918e-06, + "loss": 0.2107, + "step": 653 + }, + { + "epoch": 1.0186915887850467, + "grad_norm": 0.6141060329229784, + "learning_rate": 4.853217696028197e-06, + "loss": 0.2124, + "step": 654 + }, + { + "epoch": 1.0202492211838006, + "grad_norm": 0.5500970083546799, + "learning_rate": 4.840989803941645e-06, + "loss": 0.2413, + "step": 655 + }, + { + "epoch": 1.0218068535825544, + "grad_norm": 0.5785491153029504, + "learning_rate": 4.828762863761948e-06, + "loss": 0.322, + "step": 656 + }, + { + "epoch": 1.0233644859813085, + "grad_norm": 0.5158799094788992, + "learning_rate": 4.816536948685091e-06, + "loss": 0.2458, + "step": 657 + }, + { + "epoch": 1.0249221183800623, + "grad_norm": 0.7110301489590548, + "learning_rate": 4.804312131900929e-06, + "loss": 0.1491, + "step": 658 + }, + { + "epoch": 1.0264797507788161, + "grad_norm": 0.49142641785128816, + "learning_rate": 4.792088486592731e-06, + "loss": 0.2403, + "step": 659 + }, + { + "epoch": 1.02803738317757, + "grad_norm": 0.6365446048004576, + "learning_rate": 4.779866085936762e-06, + "loss": 0.2723, + "step": 660 + }, + { + "epoch": 1.029595015576324, + "grad_norm": 0.5622051291501104, + "learning_rate": 4.767645003101831e-06, + "loss": 0.1508, + "step": 661 + }, + { + "epoch": 1.0311526479750779, + "grad_norm": 0.7978535815313595, + "learning_rate": 4.755425311248863e-06, + "loss": 0.7126, + "step": 662 + }, + { + "epoch": 1.0327102803738317, + "grad_norm": 0.49688606974260013, + "learning_rate": 4.7432070835304494e-06, + "loss": 0.1821, + "step": 663 + }, + { + "epoch": 1.0342679127725856, + "grad_norm": 0.9843736648935775, + "learning_rate": 4.730990393090422e-06, + "loss": 0.6915, + "step": 664 + }, + { + "epoch": 1.0358255451713396, + "grad_norm": 
0.5197585726631249, + "learning_rate": 4.718775313063406e-06, + "loss": 0.1985, + "step": 665 + }, + { + "epoch": 1.0373831775700935, + "grad_norm": 0.7127374001801975, + "learning_rate": 4.70656191657439e-06, + "loss": 0.2209, + "step": 666 + }, + { + "epoch": 1.0389408099688473, + "grad_norm": 0.5805515887865611, + "learning_rate": 4.6943502767382815e-06, + "loss": 0.1781, + "step": 667 + }, + { + "epoch": 1.0404984423676011, + "grad_norm": 0.5812301249853751, + "learning_rate": 4.6821404666594715e-06, + "loss": 0.2362, + "step": 668 + }, + { + "epoch": 1.0420560747663552, + "grad_norm": 0.5137436961715455, + "learning_rate": 4.669932559431396e-06, + "loss": 0.2381, + "step": 669 + }, + { + "epoch": 1.043613707165109, + "grad_norm": 0.49254347681935357, + "learning_rate": 4.657726628136105e-06, + "loss": 0.2335, + "step": 670 + }, + { + "epoch": 1.0451713395638629, + "grad_norm": 0.807702133830089, + "learning_rate": 4.645522745843817e-06, + "loss": 0.1636, + "step": 671 + }, + { + "epoch": 1.0467289719626167, + "grad_norm": 0.6598785992415666, + "learning_rate": 4.6333209856124814e-06, + "loss": 0.2039, + "step": 672 + }, + { + "epoch": 1.0482866043613708, + "grad_norm": 0.5735476926058175, + "learning_rate": 4.621121420487345e-06, + "loss": 0.2344, + "step": 673 + }, + { + "epoch": 1.0498442367601246, + "grad_norm": 0.5406292279451508, + "learning_rate": 4.608924123500519e-06, + "loss": 0.1802, + "step": 674 + }, + { + "epoch": 1.0514018691588785, + "grad_norm": 0.5983081571916266, + "learning_rate": 4.596729167670529e-06, + "loss": 0.1676, + "step": 675 + }, + { + "epoch": 1.0529595015576323, + "grad_norm": 0.5810265835484077, + "learning_rate": 4.58453662600189e-06, + "loss": 0.2822, + "step": 676 + }, + { + "epoch": 1.0545171339563864, + "grad_norm": 0.6855576677633736, + "learning_rate": 4.572346571484661e-06, + "loss": 0.1977, + "step": 677 + }, + { + "epoch": 1.0560747663551402, + "grad_norm": 0.5335540200996262, + "learning_rate": 4.5601590770940195e-06, + "loss": 0.2152, + "step": 678 + }, + { + "epoch": 1.057632398753894, + "grad_norm": 0.6453393789551976, + "learning_rate": 4.547974215789808e-06, + "loss": 0.1886, + "step": 679 + }, + { + "epoch": 1.0591900311526479, + "grad_norm": 0.5915758123605618, + "learning_rate": 4.535792060516112e-06, + "loss": 0.2209, + "step": 680 + }, + { + "epoch": 1.060747663551402, + "grad_norm": 0.6302498485071517, + "learning_rate": 4.523612684200811e-06, + "loss": 0.239, + "step": 681 + }, + { + "epoch": 1.0623052959501558, + "grad_norm": 0.7064325822119844, + "learning_rate": 4.511436159755159e-06, + "loss": 0.1988, + "step": 682 + }, + { + "epoch": 1.0638629283489096, + "grad_norm": 0.5229568055974967, + "learning_rate": 4.499262560073328e-06, + "loss": 0.2219, + "step": 683 + }, + { + "epoch": 1.0654205607476634, + "grad_norm": 0.848853210404809, + "learning_rate": 4.487091958031984e-06, + "loss": 0.1813, + "step": 684 + }, + { + "epoch": 1.0669781931464175, + "grad_norm": 0.62716757784117, + "learning_rate": 4.474924426489847e-06, + "loss": 0.2433, + "step": 685 + }, + { + "epoch": 1.0685358255451713, + "grad_norm": 0.5961449452514619, + "learning_rate": 4.46276003828726e-06, + "loss": 0.2576, + "step": 686 + }, + { + "epoch": 1.0700934579439252, + "grad_norm": 0.5336164214991632, + "learning_rate": 4.450598866245743e-06, + "loss": 0.2094, + "step": 687 + }, + { + "epoch": 1.071651090342679, + "grad_norm": 0.5350296348658355, + "learning_rate": 4.438440983167564e-06, + "loss": 0.2883, + "step": 688 + }, + { + "epoch": 1.073208722741433, + 
"grad_norm": 0.53810152034016, + "learning_rate": 4.426286461835303e-06, + "loss": 0.1968, + "step": 689 + }, + { + "epoch": 1.074766355140187, + "grad_norm": 0.4674678946148812, + "learning_rate": 4.414135375011416e-06, + "loss": 0.1468, + "step": 690 + }, + { + "epoch": 1.0763239875389408, + "grad_norm": 0.5672398628672646, + "learning_rate": 4.401987795437797e-06, + "loss": 0.1796, + "step": 691 + }, + { + "epoch": 1.0778816199376946, + "grad_norm": 0.6409062495534954, + "learning_rate": 4.3898437958353435e-06, + "loss": 0.1536, + "step": 692 + }, + { + "epoch": 1.0794392523364487, + "grad_norm": 0.7979975112099554, + "learning_rate": 4.377703448903519e-06, + "loss": 0.7814, + "step": 693 + }, + { + "epoch": 1.0809968847352025, + "grad_norm": 0.6128214407671699, + "learning_rate": 4.3655668273199305e-06, + "loss": 0.249, + "step": 694 + }, + { + "epoch": 1.0825545171339563, + "grad_norm": 0.6671186706045426, + "learning_rate": 4.353434003739872e-06, + "loss": 0.162, + "step": 695 + }, + { + "epoch": 1.0841121495327102, + "grad_norm": 0.5657453591665842, + "learning_rate": 4.341305050795907e-06, + "loss": 0.2355, + "step": 696 + }, + { + "epoch": 1.0856697819314642, + "grad_norm": 0.5876907165277706, + "learning_rate": 4.329180041097425e-06, + "loss": 0.2898, + "step": 697 + }, + { + "epoch": 1.087227414330218, + "grad_norm": 0.5418335332942893, + "learning_rate": 4.3170590472302125e-06, + "loss": 0.2049, + "step": 698 + }, + { + "epoch": 1.088785046728972, + "grad_norm": 0.6194906159202879, + "learning_rate": 4.304942141756012e-06, + "loss": 0.2716, + "step": 699 + }, + { + "epoch": 1.0903426791277258, + "grad_norm": 0.6321743236263448, + "learning_rate": 4.292829397212094e-06, + "loss": 0.1858, + "step": 700 + }, + { + "epoch": 1.0919003115264798, + "grad_norm": 0.5884478735032194, + "learning_rate": 4.280720886110815e-06, + "loss": 0.2181, + "step": 701 + }, + { + "epoch": 1.0934579439252337, + "grad_norm": 0.512888102374138, + "learning_rate": 4.268616680939197e-06, + "loss": 0.2495, + "step": 702 + }, + { + "epoch": 1.0950155763239875, + "grad_norm": 0.6806148269647997, + "learning_rate": 4.256516854158476e-06, + "loss": 0.1811, + "step": 703 + }, + { + "epoch": 1.0965732087227413, + "grad_norm": 0.590183243846604, + "learning_rate": 4.244421478203681e-06, + "loss": 0.1759, + "step": 704 + }, + { + "epoch": 1.0981308411214954, + "grad_norm": 0.7141876070652246, + "learning_rate": 4.232330625483194e-06, + "loss": 0.2155, + "step": 705 + }, + { + "epoch": 1.0996884735202492, + "grad_norm": 0.5606859986060387, + "learning_rate": 4.220244368378324e-06, + "loss": 0.1748, + "step": 706 + }, + { + "epoch": 1.101246105919003, + "grad_norm": 0.5374363163472021, + "learning_rate": 4.208162779242862e-06, + "loss": 0.15, + "step": 707 + }, + { + "epoch": 1.102803738317757, + "grad_norm": 0.6337046437481759, + "learning_rate": 4.19608593040266e-06, + "loss": 0.2094, + "step": 708 + }, + { + "epoch": 1.104361370716511, + "grad_norm": 0.6441679702023525, + "learning_rate": 4.184013894155187e-06, + "loss": 0.1347, + "step": 709 + }, + { + "epoch": 1.1059190031152648, + "grad_norm": 0.5105399247363097, + "learning_rate": 4.171946742769109e-06, + "loss": 0.2492, + "step": 710 + }, + { + "epoch": 1.1074766355140186, + "grad_norm": 0.835104618126293, + "learning_rate": 4.1598845484838405e-06, + "loss": 0.6552, + "step": 711 + }, + { + "epoch": 1.1090342679127725, + "grad_norm": 0.5091044357624278, + "learning_rate": 4.147827383509127e-06, + "loss": 0.2459, + "step": 712 + }, + { + "epoch": 
1.1105919003115265, + "grad_norm": 0.4173053058204635, + "learning_rate": 4.135775320024601e-06, + "loss": 0.1834, + "step": 713 + }, + { + "epoch": 1.1121495327102804, + "grad_norm": 0.516073956820414, + "learning_rate": 4.123728430179363e-06, + "loss": 0.2096, + "step": 714 + }, + { + "epoch": 1.1137071651090342, + "grad_norm": 0.5229385439265322, + "learning_rate": 4.111686786091534e-06, + "loss": 0.1619, + "step": 715 + }, + { + "epoch": 1.115264797507788, + "grad_norm": 0.697243292089673, + "learning_rate": 4.099650459847835e-06, + "loss": 0.2181, + "step": 716 + }, + { + "epoch": 1.1168224299065421, + "grad_norm": 0.7935991971149668, + "learning_rate": 4.087619523503149e-06, + "loss": 0.7976, + "step": 717 + }, + { + "epoch": 1.118380062305296, + "grad_norm": 0.7468568843345416, + "learning_rate": 4.0755940490801e-06, + "loss": 0.1844, + "step": 718 + }, + { + "epoch": 1.1199376947040498, + "grad_norm": 0.655225509433663, + "learning_rate": 4.0635741085686065e-06, + "loss": 0.2016, + "step": 719 + }, + { + "epoch": 1.1214953271028036, + "grad_norm": 0.6590392753571896, + "learning_rate": 4.051559773925462e-06, + "loss": 0.2175, + "step": 720 + }, + { + "epoch": 1.1230529595015577, + "grad_norm": 0.5673140068671504, + "learning_rate": 4.039551117073899e-06, + "loss": 0.1377, + "step": 721 + }, + { + "epoch": 1.1246105919003115, + "grad_norm": 0.5308315587462934, + "learning_rate": 4.027548209903165e-06, + "loss": 0.1628, + "step": 722 + }, + { + "epoch": 1.1261682242990654, + "grad_norm": 0.5915750734678845, + "learning_rate": 4.01555112426808e-06, + "loss": 0.1005, + "step": 723 + }, + { + "epoch": 1.1277258566978192, + "grad_norm": 0.6605084387776982, + "learning_rate": 4.00355993198862e-06, + "loss": 0.1835, + "step": 724 + }, + { + "epoch": 1.1292834890965733, + "grad_norm": 0.5150684098328038, + "learning_rate": 3.991574704849474e-06, + "loss": 0.2452, + "step": 725 + }, + { + "epoch": 1.1308411214953271, + "grad_norm": 0.5762133251543371, + "learning_rate": 3.97959551459963e-06, + "loss": 0.7117, + "step": 726 + }, + { + "epoch": 1.132398753894081, + "grad_norm": 0.5992236027329398, + "learning_rate": 3.967622432951931e-06, + "loss": 0.2487, + "step": 727 + }, + { + "epoch": 1.1339563862928348, + "grad_norm": 0.6328864728376592, + "learning_rate": 3.955655531582649e-06, + "loss": 0.237, + "step": 728 + }, + { + "epoch": 1.1355140186915889, + "grad_norm": 0.5851714493036804, + "learning_rate": 3.943694882131061e-06, + "loss": 0.2293, + "step": 729 + }, + { + "epoch": 1.1370716510903427, + "grad_norm": 0.6659466040289207, + "learning_rate": 3.931740556199021e-06, + "loss": 0.7144, + "step": 730 + }, + { + "epoch": 1.1386292834890965, + "grad_norm": 0.8181017630361279, + "learning_rate": 3.91979262535052e-06, + "loss": 0.4389, + "step": 731 + }, + { + "epoch": 1.1401869158878504, + "grad_norm": 0.6310519685249386, + "learning_rate": 3.907851161111269e-06, + "loss": 0.1969, + "step": 732 + }, + { + "epoch": 1.1417445482866044, + "grad_norm": 0.5414429317292473, + "learning_rate": 3.895916234968267e-06, + "loss": 0.1969, + "step": 733 + }, + { + "epoch": 1.1433021806853583, + "grad_norm": 0.5694107952685924, + "learning_rate": 3.883987918369373e-06, + "loss": 0.2409, + "step": 734 + }, + { + "epoch": 1.144859813084112, + "grad_norm": 0.5303093457895504, + "learning_rate": 3.8720662827228774e-06, + "loss": 0.1599, + "step": 735 + }, + { + "epoch": 1.146417445482866, + "grad_norm": 0.5798145908998529, + "learning_rate": 3.860151399397077e-06, + "loss": 0.2864, + "step": 736 + }, + { + 
"epoch": 1.14797507788162, + "grad_norm": 0.7216318366333795, + "learning_rate": 3.848243339719841e-06, + "loss": 0.1274, + "step": 737 + }, + { + "epoch": 1.1495327102803738, + "grad_norm": 0.5363385976748041, + "learning_rate": 3.836342174978199e-06, + "loss": 0.1679, + "step": 738 + }, + { + "epoch": 1.1510903426791277, + "grad_norm": 0.5904714591263907, + "learning_rate": 3.824447976417897e-06, + "loss": 0.1803, + "step": 739 + }, + { + "epoch": 1.1526479750778815, + "grad_norm": 0.7957820087907058, + "learning_rate": 3.8125608152429777e-06, + "loss": 0.2525, + "step": 740 + }, + { + "epoch": 1.1542056074766356, + "grad_norm": 0.6852233460884398, + "learning_rate": 3.8006807626153565e-06, + "loss": 0.6679, + "step": 741 + }, + { + "epoch": 1.1557632398753894, + "grad_norm": 0.7058769394932962, + "learning_rate": 3.7888078896543984e-06, + "loss": 0.1596, + "step": 742 + }, + { + "epoch": 1.1573208722741433, + "grad_norm": 0.5829336602902025, + "learning_rate": 3.776942267436482e-06, + "loss": 0.1256, + "step": 743 + }, + { + "epoch": 1.158878504672897, + "grad_norm": 0.4936789586579176, + "learning_rate": 3.7650839669945804e-06, + "loss": 0.2213, + "step": 744 + }, + { + "epoch": 1.1604361370716512, + "grad_norm": 0.6225558777529572, + "learning_rate": 3.7532330593178356e-06, + "loss": 0.1667, + "step": 745 + }, + { + "epoch": 1.161993769470405, + "grad_norm": 0.5614342757375034, + "learning_rate": 3.741389615351136e-06, + "loss": 0.1611, + "step": 746 + }, + { + "epoch": 1.1635514018691588, + "grad_norm": 0.5754099281842366, + "learning_rate": 3.729553705994685e-06, + "loss": 0.2156, + "step": 747 + }, + { + "epoch": 1.1651090342679127, + "grad_norm": 0.5573995209639809, + "learning_rate": 3.7177254021035824e-06, + "loss": 0.1777, + "step": 748 + }, + { + "epoch": 1.1666666666666667, + "grad_norm": 0.6243355246035951, + "learning_rate": 3.705904774487396e-06, + "loss": 0.2092, + "step": 749 + }, + { + "epoch": 1.1682242990654206, + "grad_norm": 0.7072395196568583, + "learning_rate": 3.694091893909746e-06, + "loss": 0.1916, + "step": 750 + }, + { + "epoch": 1.1697819314641744, + "grad_norm": 0.5876756814093886, + "learning_rate": 3.6822868310878683e-06, + "loss": 0.1514, + "step": 751 + }, + { + "epoch": 1.1713395638629283, + "grad_norm": 0.51963445472172, + "learning_rate": 3.670489656692202e-06, + "loss": 0.2446, + "step": 752 + }, + { + "epoch": 1.1728971962616823, + "grad_norm": 0.6338437719274572, + "learning_rate": 3.658700441345959e-06, + "loss": 0.2041, + "step": 753 + }, + { + "epoch": 1.1744548286604362, + "grad_norm": 0.7029067603645407, + "learning_rate": 3.646919255624711e-06, + "loss": 0.1852, + "step": 754 + }, + { + "epoch": 1.17601246105919, + "grad_norm": 2.8451831521816353, + "learning_rate": 3.6351461700559564e-06, + "loss": 0.6003, + "step": 755 + }, + { + "epoch": 1.1775700934579438, + "grad_norm": 0.5800825083133982, + "learning_rate": 3.623381255118702e-06, + "loss": 0.1983, + "step": 756 + }, + { + "epoch": 1.179127725856698, + "grad_norm": 0.7108249246289187, + "learning_rate": 3.6116245812430404e-06, + "loss": 0.1904, + "step": 757 + }, + { + "epoch": 1.1806853582554517, + "grad_norm": 0.5537550002098082, + "learning_rate": 3.5998762188097364e-06, + "loss": 0.2697, + "step": 758 + }, + { + "epoch": 1.1822429906542056, + "grad_norm": 0.5354252171867416, + "learning_rate": 3.588136238149791e-06, + "loss": 0.2256, + "step": 759 + }, + { + "epoch": 1.1838006230529594, + "grad_norm": 0.5746205607851972, + "learning_rate": 3.5764047095440313e-06, + "loss": 0.2216, + 
"step": 760 + }, + { + "epoch": 1.1853582554517135, + "grad_norm": 0.6474421569647941, + "learning_rate": 3.5646817032226855e-06, + "loss": 0.18, + "step": 761 + }, + { + "epoch": 1.1869158878504673, + "grad_norm": 0.6499250531970322, + "learning_rate": 3.552967289364967e-06, + "loss": 0.1564, + "step": 762 + }, + { + "epoch": 1.1884735202492211, + "grad_norm": 0.6915421275577466, + "learning_rate": 3.541261538098647e-06, + "loss": 0.1937, + "step": 763 + }, + { + "epoch": 1.190031152647975, + "grad_norm": 0.7484332653622916, + "learning_rate": 3.529564519499641e-06, + "loss": 0.8181, + "step": 764 + }, + { + "epoch": 1.191588785046729, + "grad_norm": 0.5487998303234818, + "learning_rate": 3.517876303591581e-06, + "loss": 0.2187, + "step": 765 + }, + { + "epoch": 1.1931464174454829, + "grad_norm": 0.5468867601412329, + "learning_rate": 3.506196960345413e-06, + "loss": 0.2741, + "step": 766 + }, + { + "epoch": 1.1947040498442367, + "grad_norm": 0.5333599222192844, + "learning_rate": 3.494526559678958e-06, + "loss": 0.196, + "step": 767 + }, + { + "epoch": 1.1962616822429906, + "grad_norm": 0.6163139809793089, + "learning_rate": 3.4828651714565056e-06, + "loss": 0.1689, + "step": 768 + }, + { + "epoch": 1.1978193146417446, + "grad_norm": 0.6118788241013663, + "learning_rate": 3.4712128654883915e-06, + "loss": 0.6684, + "step": 769 + }, + { + "epoch": 1.1993769470404985, + "grad_norm": 0.7588288371409719, + "learning_rate": 3.459569711530586e-06, + "loss": 0.1844, + "step": 770 + }, + { + "epoch": 1.2009345794392523, + "grad_norm": 0.512022978994329, + "learning_rate": 3.447935779284265e-06, + "loss": 0.2842, + "step": 771 + }, + { + "epoch": 1.2024922118380061, + "grad_norm": 0.5591196557510058, + "learning_rate": 3.436311138395402e-06, + "loss": 0.1664, + "step": 772 + }, + { + "epoch": 1.2040498442367602, + "grad_norm": 0.8115349813699498, + "learning_rate": 3.424695858454347e-06, + "loss": 0.307, + "step": 773 + }, + { + "epoch": 1.205607476635514, + "grad_norm": 0.5218970414061956, + "learning_rate": 3.4130900089954142e-06, + "loss": 0.2348, + "step": 774 + }, + { + "epoch": 1.2071651090342679, + "grad_norm": 0.6595592104442388, + "learning_rate": 3.4014936594964608e-06, + "loss": 0.2381, + "step": 775 + }, + { + "epoch": 1.2087227414330217, + "grad_norm": 0.5724303041429597, + "learning_rate": 3.3899068793784717e-06, + "loss": 0.2087, + "step": 776 + }, + { + "epoch": 1.2102803738317758, + "grad_norm": 0.5855841777284334, + "learning_rate": 3.378329738005144e-06, + "loss": 0.2215, + "step": 777 + }, + { + "epoch": 1.2118380062305296, + "grad_norm": 0.5794484038490832, + "learning_rate": 3.3667623046824783e-06, + "loss": 0.1275, + "step": 778 + }, + { + "epoch": 1.2133956386292835, + "grad_norm": 0.8252092993882152, + "learning_rate": 3.3552046486583547e-06, + "loss": 0.2076, + "step": 779 + }, + { + "epoch": 1.2149532710280373, + "grad_norm": 0.5054517090774381, + "learning_rate": 3.3436568391221215e-06, + "loss": 0.2269, + "step": 780 + }, + { + "epoch": 1.2165109034267914, + "grad_norm": 0.6361245982706095, + "learning_rate": 3.3321189452041814e-06, + "loss": 0.1765, + "step": 781 + }, + { + "epoch": 1.2180685358255452, + "grad_norm": 0.6359846065805195, + "learning_rate": 3.3205910359755823e-06, + "loss": 0.213, + "step": 782 + }, + { + "epoch": 1.219626168224299, + "grad_norm": 0.575827824012367, + "learning_rate": 3.309073180447593e-06, + "loss": 0.2128, + "step": 783 + }, + { + "epoch": 1.2211838006230529, + "grad_norm": 0.594472470089166, + "learning_rate": 
3.2975654475713005e-06, + "loss": 0.1867, + "step": 784 + }, + { + "epoch": 1.222741433021807, + "grad_norm": 0.6841555235014364, + "learning_rate": 3.286067906237188e-06, + "loss": 0.1615, + "step": 785 + }, + { + "epoch": 1.2242990654205608, + "grad_norm": 0.5190015178190842, + "learning_rate": 3.274580625274737e-06, + "loss": 0.2139, + "step": 786 + }, + { + "epoch": 1.2258566978193146, + "grad_norm": 0.675548730024154, + "learning_rate": 3.263103673451996e-06, + "loss": 0.2535, + "step": 787 + }, + { + "epoch": 1.2274143302180685, + "grad_norm": 0.7466550657712839, + "learning_rate": 3.2516371194751838e-06, + "loss": 0.2706, + "step": 788 + }, + { + "epoch": 1.2289719626168225, + "grad_norm": 0.6666523188287753, + "learning_rate": 3.24018103198827e-06, + "loss": 0.1727, + "step": 789 + }, + { + "epoch": 1.2305295950155763, + "grad_norm": 0.6337742576262074, + "learning_rate": 3.22873547957257e-06, + "loss": 0.1979, + "step": 790 + }, + { + "epoch": 1.2320872274143302, + "grad_norm": 0.5592619596660579, + "learning_rate": 3.217300530746331e-06, + "loss": 0.2195, + "step": 791 + }, + { + "epoch": 1.233644859813084, + "grad_norm": 0.6098769962820866, + "learning_rate": 3.2058762539643214e-06, + "loss": 0.231, + "step": 792 + }, + { + "epoch": 1.235202492211838, + "grad_norm": 0.5644158815873026, + "learning_rate": 3.1944627176174204e-06, + "loss": 0.2373, + "step": 793 + }, + { + "epoch": 1.236760124610592, + "grad_norm": 0.9387436364990357, + "learning_rate": 3.1830599900322135e-06, + "loss": 0.8066, + "step": 794 + }, + { + "epoch": 1.2383177570093458, + "grad_norm": 0.5202721107932754, + "learning_rate": 3.1716681394705783e-06, + "loss": 0.2198, + "step": 795 + }, + { + "epoch": 1.2398753894080996, + "grad_norm": 0.5496594883977267, + "learning_rate": 3.1602872341292772e-06, + "loss": 0.1961, + "step": 796 + }, + { + "epoch": 1.2414330218068537, + "grad_norm": 0.6098209049046375, + "learning_rate": 3.148917342139548e-06, + "loss": 0.2209, + "step": 797 + }, + { + "epoch": 1.2429906542056075, + "grad_norm": 0.6346192193491992, + "learning_rate": 3.1375585315667047e-06, + "loss": 0.1802, + "step": 798 + }, + { + "epoch": 1.2445482866043613, + "grad_norm": 0.765060316147878, + "learning_rate": 3.1262108704097137e-06, + "loss": 0.7635, + "step": 799 + }, + { + "epoch": 1.2461059190031152, + "grad_norm": 0.5914294283180993, + "learning_rate": 3.1148744266008024e-06, + "loss": 0.1632, + "step": 800 + }, + { + "epoch": 1.2476635514018692, + "grad_norm": 0.6570214224554458, + "learning_rate": 3.1035492680050416e-06, + "loss": 0.2406, + "step": 801 + }, + { + "epoch": 1.249221183800623, + "grad_norm": 0.5452895252692876, + "learning_rate": 3.0922354624199487e-06, + "loss": 0.2458, + "step": 802 + }, + { + "epoch": 1.250778816199377, + "grad_norm": 0.8021393101552716, + "learning_rate": 3.080933077575071e-06, + "loss": 0.2422, + "step": 803 + }, + { + "epoch": 1.2523364485981308, + "grad_norm": 0.5980931498074656, + "learning_rate": 3.0696421811315923e-06, + "loss": 0.3071, + "step": 804 + }, + { + "epoch": 1.2538940809968846, + "grad_norm": 0.7151706000107999, + "learning_rate": 3.0583628406819167e-06, + "loss": 0.1693, + "step": 805 + }, + { + "epoch": 1.2554517133956387, + "grad_norm": 0.7557667483815892, + "learning_rate": 3.0470951237492724e-06, + "loss": 0.6472, + "step": 806 + }, + { + "epoch": 1.2570093457943925, + "grad_norm": 0.5383018776710425, + "learning_rate": 3.035839097787301e-06, + "loss": 0.2179, + "step": 807 + }, + { + "epoch": 1.2585669781931463, + "grad_norm": 
0.5591749963571674, + "learning_rate": 3.024594830179658e-06, + "loss": 0.2537, + "step": 808 + }, + { + "epoch": 1.2601246105919004, + "grad_norm": 0.5434411022961728, + "learning_rate": 3.0133623882396067e-06, + "loss": 0.1933, + "step": 809 + }, + { + "epoch": 1.2616822429906542, + "grad_norm": 0.6201199414157127, + "learning_rate": 3.0021418392096215e-06, + "loss": 0.2193, + "step": 810 + }, + { + "epoch": 1.263239875389408, + "grad_norm": 0.669872737304252, + "learning_rate": 2.9909332502609744e-06, + "loss": 0.1446, + "step": 811 + }, + { + "epoch": 1.264797507788162, + "grad_norm": 0.5828212724381946, + "learning_rate": 2.979736688493343e-06, + "loss": 0.253, + "step": 812 + }, + { + "epoch": 1.2663551401869158, + "grad_norm": 0.5644246073635343, + "learning_rate": 2.968552220934402e-06, + "loss": 0.2241, + "step": 813 + }, + { + "epoch": 1.2679127725856698, + "grad_norm": 0.5607183966563147, + "learning_rate": 2.9573799145394266e-06, + "loss": 0.2745, + "step": 814 + }, + { + "epoch": 1.2694704049844237, + "grad_norm": 0.474278573645314, + "learning_rate": 2.9462198361908893e-06, + "loss": 0.1337, + "step": 815 + }, + { + "epoch": 1.2710280373831775, + "grad_norm": 1.3740104676242695, + "learning_rate": 2.9350720526980592e-06, + "loss": 0.7275, + "step": 816 + }, + { + "epoch": 1.2725856697819315, + "grad_norm": 0.711919465609596, + "learning_rate": 2.9239366307966033e-06, + "loss": 0.7703, + "step": 817 + }, + { + "epoch": 1.2741433021806854, + "grad_norm": 0.6630095165519386, + "learning_rate": 2.912813637148187e-06, + "loss": 0.1362, + "step": 818 + }, + { + "epoch": 1.2757009345794392, + "grad_norm": 0.602978479954925, + "learning_rate": 2.9017031383400772e-06, + "loss": 0.1538, + "step": 819 + }, + { + "epoch": 1.277258566978193, + "grad_norm": 0.7257094178132808, + "learning_rate": 2.890605200884733e-06, + "loss": 0.1533, + "step": 820 + }, + { + "epoch": 1.278816199376947, + "grad_norm": 0.5041273431174441, + "learning_rate": 2.879519891219424e-06, + "loss": 0.195, + "step": 821 + }, + { + "epoch": 1.280373831775701, + "grad_norm": 0.5315316010733497, + "learning_rate": 2.868447275705821e-06, + "loss": 0.1634, + "step": 822 + }, + { + "epoch": 1.2819314641744548, + "grad_norm": 0.6131393193633405, + "learning_rate": 2.8573874206296005e-06, + "loss": 0.2044, + "step": 823 + }, + { + "epoch": 1.2834890965732086, + "grad_norm": 0.5970183436040595, + "learning_rate": 2.846340392200051e-06, + "loss": 0.2539, + "step": 824 + }, + { + "epoch": 1.2850467289719627, + "grad_norm": 0.5065607828613443, + "learning_rate": 2.8353062565496715e-06, + "loss": 0.166, + "step": 825 + }, + { + "epoch": 1.2866043613707165, + "grad_norm": 0.6837585450327539, + "learning_rate": 2.824285079733788e-06, + "loss": 0.2406, + "step": 826 + }, + { + "epoch": 1.2881619937694704, + "grad_norm": 0.5599124367538377, + "learning_rate": 2.8132769277301374e-06, + "loss": 0.157, + "step": 827 + }, + { + "epoch": 1.2897196261682242, + "grad_norm": 0.49511182258884123, + "learning_rate": 2.8022818664384945e-06, + "loss": 0.1824, + "step": 828 + }, + { + "epoch": 1.291277258566978, + "grad_norm": 0.5896227768783965, + "learning_rate": 2.791299961680255e-06, + "loss": 0.258, + "step": 829 + }, + { + "epoch": 1.2928348909657321, + "grad_norm": 0.5730839746888057, + "learning_rate": 2.7803312791980697e-06, + "loss": 0.2379, + "step": 830 + }, + { + "epoch": 1.294392523364486, + "grad_norm": 0.6736027436106719, + "learning_rate": 2.769375884655421e-06, + "loss": 0.2515, + "step": 831 + }, + { + "epoch": 
1.2959501557632398, + "grad_norm": 0.6242264709025079, + "learning_rate": 2.758433843636252e-06, + "loss": 0.1923, + "step": 832 + }, + { + "epoch": 1.2975077881619939, + "grad_norm": 0.6286501649370759, + "learning_rate": 2.7475052216445588e-06, + "loss": 0.1842, + "step": 833 + }, + { + "epoch": 1.2990654205607477, + "grad_norm": 0.5914028874575505, + "learning_rate": 2.7365900841040104e-06, + "loss": 0.1473, + "step": 834 + }, + { + "epoch": 1.3006230529595015, + "grad_norm": 0.6185390608342917, + "learning_rate": 2.7256884963575536e-06, + "loss": 0.2347, + "step": 835 + }, + { + "epoch": 1.3021806853582554, + "grad_norm": 0.5490349592608532, + "learning_rate": 2.714800523667011e-06, + "loss": 0.1875, + "step": 836 + }, + { + "epoch": 1.3037383177570092, + "grad_norm": 0.619509184599072, + "learning_rate": 2.703926231212708e-06, + "loss": 0.2472, + "step": 837 + }, + { + "epoch": 1.3052959501557633, + "grad_norm": 0.7158852617243849, + "learning_rate": 2.6930656840930713e-06, + "loss": 0.2468, + "step": 838 + }, + { + "epoch": 1.3068535825545171, + "grad_norm": 0.723996693315863, + "learning_rate": 2.6822189473242422e-06, + "loss": 0.2276, + "step": 839 + }, + { + "epoch": 1.308411214953271, + "grad_norm": 0.5632679246702721, + "learning_rate": 2.671386085839682e-06, + "loss": 0.2215, + "step": 840 + }, + { + "epoch": 1.309968847352025, + "grad_norm": 0.5985281299233187, + "learning_rate": 2.6605671644897967e-06, + "loss": 0.3572, + "step": 841 + }, + { + "epoch": 1.3115264797507789, + "grad_norm": 0.686365038362172, + "learning_rate": 2.6497622480415346e-06, + "loss": 0.3266, + "step": 842 + }, + { + "epoch": 1.3130841121495327, + "grad_norm": 0.6582167035193466, + "learning_rate": 2.6389714011780078e-06, + "loss": 0.2669, + "step": 843 + }, + { + "epoch": 1.3146417445482865, + "grad_norm": 0.5788593198598405, + "learning_rate": 2.628194688498096e-06, + "loss": 0.2572, + "step": 844 + }, + { + "epoch": 1.3161993769470404, + "grad_norm": 0.5457859405003289, + "learning_rate": 2.6174321745160707e-06, + "loss": 0.2016, + "step": 845 + }, + { + "epoch": 1.3177570093457944, + "grad_norm": 0.5093352344419979, + "learning_rate": 2.606683923661204e-06, + "loss": 0.2152, + "step": 846 + }, + { + "epoch": 1.3193146417445483, + "grad_norm": 0.6332315738079737, + "learning_rate": 2.5959500002773763e-06, + "loss": 0.1449, + "step": 847 + }, + { + "epoch": 1.320872274143302, + "grad_norm": 0.4685817262067038, + "learning_rate": 2.585230468622705e-06, + "loss": 0.2152, + "step": 848 + }, + { + "epoch": 1.3224299065420562, + "grad_norm": 0.49981674536826676, + "learning_rate": 2.574525392869143e-06, + "loss": 0.1783, + "step": 849 + }, + { + "epoch": 1.32398753894081, + "grad_norm": 1.0724140185603979, + "learning_rate": 2.563834837102115e-06, + "loss": 0.6521, + "step": 850 + }, + { + "epoch": 1.3255451713395638, + "grad_norm": 0.5582924411201017, + "learning_rate": 2.5531588653201134e-06, + "loss": 0.1696, + "step": 851 + }, + { + "epoch": 1.3271028037383177, + "grad_norm": 0.5064036048508646, + "learning_rate": 2.542497541434329e-06, + "loss": 0.1616, + "step": 852 + }, + { + "epoch": 1.3286604361370715, + "grad_norm": 0.7424483742643145, + "learning_rate": 2.531850929268258e-06, + "loss": 0.3, + "step": 853 + }, + { + "epoch": 1.3302180685358256, + "grad_norm": 0.6230465177616844, + "learning_rate": 2.5212190925573366e-06, + "loss": 0.638, + "step": 854 + }, + { + "epoch": 1.3317757009345794, + "grad_norm": 0.5918692120070366, + "learning_rate": 2.510602094948535e-06, + "loss": 0.1742, + "step": 
855 + }, + { + "epoch": 1.3333333333333333, + "grad_norm": 0.6010294004442239, + "learning_rate": 2.5000000000000015e-06, + "loss": 0.2177, + "step": 856 + }, + { + "epoch": 1.3348909657320873, + "grad_norm": 0.8792131655034126, + "learning_rate": 2.4894128711806603e-06, + "loss": 0.1769, + "step": 857 + }, + { + "epoch": 1.3364485981308412, + "grad_norm": 0.5439058673773991, + "learning_rate": 2.4788407718698487e-06, + "loss": 0.1423, + "step": 858 + }, + { + "epoch": 1.338006230529595, + "grad_norm": 0.6698324814866782, + "learning_rate": 2.468283765356931e-06, + "loss": 0.2016, + "step": 859 + }, + { + "epoch": 1.3395638629283488, + "grad_norm": 0.7209231410413558, + "learning_rate": 2.4577419148409123e-06, + "loss": 0.2526, + "step": 860 + }, + { + "epoch": 1.3411214953271027, + "grad_norm": 0.5554513132189719, + "learning_rate": 2.447215283430072e-06, + "loss": 0.2054, + "step": 861 + }, + { + "epoch": 1.3426791277258567, + "grad_norm": 0.6204181777681318, + "learning_rate": 2.436703934141583e-06, + "loss": 0.2055, + "step": 862 + }, + { + "epoch": 1.3442367601246106, + "grad_norm": 0.6174847596208328, + "learning_rate": 2.4262079299011287e-06, + "loss": 0.2214, + "step": 863 + }, + { + "epoch": 1.3457943925233644, + "grad_norm": 0.596347448095447, + "learning_rate": 2.4157273335425296e-06, + "loss": 0.2396, + "step": 864 + }, + { + "epoch": 1.3473520249221185, + "grad_norm": 0.6945784756475979, + "learning_rate": 2.4052622078073714e-06, + "loss": 0.6505, + "step": 865 + }, + { + "epoch": 1.3489096573208723, + "grad_norm": 0.6484352021895371, + "learning_rate": 2.394812615344622e-06, + "loss": 0.2738, + "step": 866 + }, + { + "epoch": 1.3504672897196262, + "grad_norm": 0.5351329498045785, + "learning_rate": 2.384378618710265e-06, + "loss": 0.1667, + "step": 867 + }, + { + "epoch": 1.35202492211838, + "grad_norm": 0.5884847134807437, + "learning_rate": 2.3739602803669126e-06, + "loss": 0.2267, + "step": 868 + }, + { + "epoch": 1.3535825545171338, + "grad_norm": 0.7327148616338638, + "learning_rate": 2.363557662683446e-06, + "loss": 0.7327, + "step": 869 + }, + { + "epoch": 1.355140186915888, + "grad_norm": 0.712258388986809, + "learning_rate": 2.3531708279346347e-06, + "loss": 0.2281, + "step": 870 + }, + { + "epoch": 1.3566978193146417, + "grad_norm": 0.598978404959667, + "learning_rate": 2.3427998383007605e-06, + "loss": 0.2433, + "step": 871 + }, + { + "epoch": 1.3582554517133956, + "grad_norm": 0.6295698566133894, + "learning_rate": 2.3324447558672543e-06, + "loss": 0.2569, + "step": 872 + }, + { + "epoch": 1.3598130841121496, + "grad_norm": 0.5986998801181967, + "learning_rate": 2.3221056426243112e-06, + "loss": 0.1962, + "step": 873 + }, + { + "epoch": 1.3613707165109035, + "grad_norm": 0.5674067782697227, + "learning_rate": 2.3117825604665405e-06, + "loss": 0.2158, + "step": 874 + }, + { + "epoch": 1.3629283489096573, + "grad_norm": 0.6884839665928056, + "learning_rate": 2.3014755711925695e-06, + "loss": 0.2329, + "step": 875 + }, + { + "epoch": 1.3644859813084111, + "grad_norm": 0.6885485571502845, + "learning_rate": 2.291184736504695e-06, + "loss": 0.1955, + "step": 876 + }, + { + "epoch": 1.366043613707165, + "grad_norm": 0.5473669303308986, + "learning_rate": 2.2809101180084963e-06, + "loss": 0.2171, + "step": 877 + }, + { + "epoch": 1.367601246105919, + "grad_norm": 0.5860469422390239, + "learning_rate": 2.2706517772124875e-06, + "loss": 0.3142, + "step": 878 + }, + { + "epoch": 1.3691588785046729, + "grad_norm": 0.4589130720603424, + "learning_rate": 
2.2604097755277244e-06, + "loss": 0.1892, + "step": 879 + }, + { + "epoch": 1.3707165109034267, + "grad_norm": 0.56817784386682, + "learning_rate": 2.250184174267459e-06, + "loss": 0.2104, + "step": 880 + }, + { + "epoch": 1.3722741433021808, + "grad_norm": 2.9170440438070244, + "learning_rate": 2.239975034646756e-06, + "loss": 0.4075, + "step": 881 + }, + { + "epoch": 1.3738317757009346, + "grad_norm": 0.5936408877749505, + "learning_rate": 2.2297824177821374e-06, + "loss": 0.1725, + "step": 882 + }, + { + "epoch": 1.3753894080996885, + "grad_norm": 0.5570669743525561, + "learning_rate": 2.2196063846912142e-06, + "loss": 0.1089, + "step": 883 + }, + { + "epoch": 1.3769470404984423, + "grad_norm": 0.7043574775467037, + "learning_rate": 2.209446996292312e-06, + "loss": 0.2412, + "step": 884 + }, + { + "epoch": 1.3785046728971961, + "grad_norm": 0.6356878135461869, + "learning_rate": 2.1993043134041213e-06, + "loss": 0.2314, + "step": 885 + }, + { + "epoch": 1.3800623052959502, + "grad_norm": 0.6088173529635026, + "learning_rate": 2.1891783967453235e-06, + "loss": 0.1986, + "step": 886 + }, + { + "epoch": 1.381619937694704, + "grad_norm": 0.6505134414780895, + "learning_rate": 2.1790693069342293e-06, + "loss": 0.2621, + "step": 887 + }, + { + "epoch": 1.3831775700934579, + "grad_norm": 0.5229082442758375, + "learning_rate": 2.168977104488415e-06, + "loss": 0.1857, + "step": 888 + }, + { + "epoch": 1.384735202492212, + "grad_norm": 0.6219752894683416, + "learning_rate": 2.158901849824363e-06, + "loss": 0.2393, + "step": 889 + }, + { + "epoch": 1.3862928348909658, + "grad_norm": 0.6712717762459158, + "learning_rate": 2.1488436032571e-06, + "loss": 0.1975, + "step": 890 + }, + { + "epoch": 1.3878504672897196, + "grad_norm": 0.5882444001428154, + "learning_rate": 2.138802424999833e-06, + "loss": 0.1938, + "step": 891 + }, + { + "epoch": 1.3894080996884735, + "grad_norm": 0.7887785551105917, + "learning_rate": 2.128778375163588e-06, + "loss": 0.2438, + "step": 892 + }, + { + "epoch": 1.3909657320872273, + "grad_norm": 0.6726478463164812, + "learning_rate": 2.1187715137568564e-06, + "loss": 0.1609, + "step": 893 + }, + { + "epoch": 1.3925233644859814, + "grad_norm": 1.1772556659919378, + "learning_rate": 2.1087819006852327e-06, + "loss": 0.1874, + "step": 894 + }, + { + "epoch": 1.3940809968847352, + "grad_norm": 0.6141735195326121, + "learning_rate": 2.098809595751049e-06, + "loss": 0.1771, + "step": 895 + }, + { + "epoch": 1.395638629283489, + "grad_norm": 0.574616048911299, + "learning_rate": 2.08885465865303e-06, + "loss": 0.1973, + "step": 896 + }, + { + "epoch": 1.397196261682243, + "grad_norm": 0.7291041551854346, + "learning_rate": 2.078917148985921e-06, + "loss": 0.7534, + "step": 897 + }, + { + "epoch": 1.398753894080997, + "grad_norm": 0.5907252824302475, + "learning_rate": 2.06899712624015e-06, + "loss": 0.2802, + "step": 898 + }, + { + "epoch": 1.4003115264797508, + "grad_norm": 0.5328199626427012, + "learning_rate": 2.0590946498014493e-06, + "loss": 0.2303, + "step": 899 + }, + { + "epoch": 1.4018691588785046, + "grad_norm": 0.6150130691322249, + "learning_rate": 2.049209778950518e-06, + "loss": 0.2424, + "step": 900 + }, + { + "epoch": 1.4034267912772584, + "grad_norm": 0.6127344165171512, + "learning_rate": 2.0393425728626526e-06, + "loss": 0.1674, + "step": 901 + }, + { + "epoch": 1.4049844236760125, + "grad_norm": 0.6455010665412071, + "learning_rate": 2.029493090607413e-06, + "loss": 0.201, + "step": 902 + }, + { + "epoch": 1.4065420560747663, + "grad_norm": 0.6680633935331085, 
+ "learning_rate": 2.0196613911482445e-06, + "loss": 0.1652, + "step": 903 + }, + { + "epoch": 1.4080996884735202, + "grad_norm": 0.5230495421575583, + "learning_rate": 2.0098475333421426e-06, + "loss": 0.1869, + "step": 904 + }, + { + "epoch": 1.4096573208722742, + "grad_norm": 0.6484127593933029, + "learning_rate": 2.0000515759392904e-06, + "loss": 0.2167, + "step": 905 + }, + { + "epoch": 1.411214953271028, + "grad_norm": 0.6594541288417016, + "learning_rate": 1.9902735775827146e-06, + "loss": 0.3135, + "step": 906 + }, + { + "epoch": 1.412772585669782, + "grad_norm": 0.6546159891601314, + "learning_rate": 1.980513596807932e-06, + "loss": 0.1546, + "step": 907 + }, + { + "epoch": 1.4143302180685358, + "grad_norm": 0.6853904172880749, + "learning_rate": 1.9707716920425923e-06, + "loss": 0.2063, + "step": 908 + }, + { + "epoch": 1.4158878504672896, + "grad_norm": 0.6131385223592029, + "learning_rate": 1.9610479216061393e-06, + "loss": 0.2792, + "step": 909 + }, + { + "epoch": 1.4174454828660437, + "grad_norm": 0.47832488985489935, + "learning_rate": 1.951342343709456e-06, + "loss": 0.2819, + "step": 910 + }, + { + "epoch": 1.4190031152647975, + "grad_norm": 0.7260584028544175, + "learning_rate": 1.941655016454515e-06, + "loss": 0.1664, + "step": 911 + }, + { + "epoch": 1.4205607476635513, + "grad_norm": 0.531050945261339, + "learning_rate": 1.9319859978340312e-06, + "loss": 0.218, + "step": 912 + }, + { + "epoch": 1.4221183800623054, + "grad_norm": 0.7077461606190044, + "learning_rate": 1.9223353457311178e-06, + "loss": 0.2129, + "step": 913 + }, + { + "epoch": 1.4236760124610592, + "grad_norm": 0.6914074130890262, + "learning_rate": 1.912703117918938e-06, + "loss": 0.1846, + "step": 914 + }, + { + "epoch": 1.425233644859813, + "grad_norm": 0.7199368018705896, + "learning_rate": 1.9030893720603604e-06, + "loss": 0.2253, + "step": 915 + }, + { + "epoch": 1.426791277258567, + "grad_norm": 0.5583301531155793, + "learning_rate": 1.8934941657076057e-06, + "loss": 0.2059, + "step": 916 + }, + { + "epoch": 1.4283489096573208, + "grad_norm": 0.5646047767722963, + "learning_rate": 1.8839175563019145e-06, + "loss": 0.2514, + "step": 917 + }, + { + "epoch": 1.4299065420560748, + "grad_norm": 0.6817330899431177, + "learning_rate": 1.8743596011731984e-06, + "loss": 0.2018, + "step": 918 + }, + { + "epoch": 1.4314641744548287, + "grad_norm": 0.6478720709719429, + "learning_rate": 1.8648203575396912e-06, + "loss": 0.1172, + "step": 919 + }, + { + "epoch": 1.4330218068535825, + "grad_norm": 0.6481753359929265, + "learning_rate": 1.8552998825076163e-06, + "loss": 0.2396, + "step": 920 + }, + { + "epoch": 1.4345794392523366, + "grad_norm": 0.6132125740119907, + "learning_rate": 1.845798233070833e-06, + "loss": 0.2065, + "step": 921 + }, + { + "epoch": 1.4361370716510904, + "grad_norm": 0.5448210616281429, + "learning_rate": 1.8363154661105131e-06, + "loss": 0.2022, + "step": 922 + }, + { + "epoch": 1.4376947040498442, + "grad_norm": 0.5931140592473454, + "learning_rate": 1.826851638394777e-06, + "loss": 0.2241, + "step": 923 + }, + { + "epoch": 1.439252336448598, + "grad_norm": 0.5732273344982183, + "learning_rate": 1.8174068065783768e-06, + "loss": 0.238, + "step": 924 + }, + { + "epoch": 1.440809968847352, + "grad_norm": 0.6031290862741796, + "learning_rate": 1.8079810272023373e-06, + "loss": 0.2065, + "step": 925 + }, + { + "epoch": 1.442367601246106, + "grad_norm": 0.7027323074692515, + "learning_rate": 1.7985743566936338e-06, + "loss": 0.1353, + "step": 926 + }, + { + "epoch": 1.4439252336448598, + 
"grad_norm": 0.679661751499936, + "learning_rate": 1.7891868513648436e-06, + "loss": 0.1636, + "step": 927 + }, + { + "epoch": 1.4454828660436136, + "grad_norm": 0.6123098696997167, + "learning_rate": 1.7798185674138163e-06, + "loss": 0.7362, + "step": 928 + }, + { + "epoch": 1.4470404984423677, + "grad_norm": 0.6533459270958951, + "learning_rate": 1.7704695609233275e-06, + "loss": 0.2479, + "step": 929 + }, + { + "epoch": 1.4485981308411215, + "grad_norm": 0.6018511814849575, + "learning_rate": 1.7611398878607544e-06, + "loss": 0.2135, + "step": 930 + }, + { + "epoch": 1.4501557632398754, + "grad_norm": 0.6980529317039801, + "learning_rate": 1.7518296040777355e-06, + "loss": 0.1639, + "step": 931 + }, + { + "epoch": 1.4517133956386292, + "grad_norm": 0.6062016861868827, + "learning_rate": 1.742538765309832e-06, + "loss": 0.2538, + "step": 932 + }, + { + "epoch": 1.453271028037383, + "grad_norm": 0.6539624285807091, + "learning_rate": 1.7332674271762018e-06, + "loss": 0.1811, + "step": 933 + }, + { + "epoch": 1.4548286604361371, + "grad_norm": 0.7122687298587204, + "learning_rate": 1.724015645179264e-06, + "loss": 0.1891, + "step": 934 + }, + { + "epoch": 1.456386292834891, + "grad_norm": 0.7014287750312252, + "learning_rate": 1.7147834747043651e-06, + "loss": 0.1866, + "step": 935 + }, + { + "epoch": 1.4579439252336448, + "grad_norm": 0.6319275863012598, + "learning_rate": 1.7055709710194452e-06, + "loss": 0.1921, + "step": 936 + }, + { + "epoch": 1.4595015576323989, + "grad_norm": 0.6620029301577026, + "learning_rate": 1.6963781892747128e-06, + "loss": 0.1889, + "step": 937 + }, + { + "epoch": 1.4610591900311527, + "grad_norm": 0.6613697258053355, + "learning_rate": 1.6872051845023124e-06, + "loss": 0.2277, + "step": 938 + }, + { + "epoch": 1.4626168224299065, + "grad_norm": 0.6709827699093153, + "learning_rate": 1.6780520116159954e-06, + "loss": 0.2345, + "step": 939 + }, + { + "epoch": 1.4641744548286604, + "grad_norm": 0.6308994122097046, + "learning_rate": 1.6689187254107852e-06, + "loss": 0.2384, + "step": 940 + }, + { + "epoch": 1.4657320872274142, + "grad_norm": 0.49997735693518447, + "learning_rate": 1.6598053805626607e-06, + "loss": 0.2885, + "step": 941 + }, + { + "epoch": 1.4672897196261683, + "grad_norm": 0.5769855416317906, + "learning_rate": 1.6507120316282222e-06, + "loss": 0.2262, + "step": 942 + }, + { + "epoch": 1.4688473520249221, + "grad_norm": 0.7766043611879518, + "learning_rate": 1.6416387330443594e-06, + "loss": 0.2012, + "step": 943 + }, + { + "epoch": 1.470404984423676, + "grad_norm": 0.6306011227357949, + "learning_rate": 1.6325855391279399e-06, + "loss": 0.1672, + "step": 944 + }, + { + "epoch": 1.47196261682243, + "grad_norm": 0.6252880509484167, + "learning_rate": 1.623552504075467e-06, + "loss": 0.21, + "step": 945 + }, + { + "epoch": 1.4735202492211839, + "grad_norm": 0.5733165288371812, + "learning_rate": 1.6145396819627734e-06, + "loss": 0.203, + "step": 946 + }, + { + "epoch": 1.4750778816199377, + "grad_norm": 0.7571650504143702, + "learning_rate": 1.6055471267446804e-06, + "loss": 0.2244, + "step": 947 + }, + { + "epoch": 1.4766355140186915, + "grad_norm": 0.7478203882298783, + "learning_rate": 1.5965748922546876e-06, + "loss": 0.1799, + "step": 948 + }, + { + "epoch": 1.4781931464174454, + "grad_norm": 0.6868312970173284, + "learning_rate": 1.5876230322046393e-06, + "loss": 0.6746, + "step": 949 + }, + { + "epoch": 1.4797507788161994, + "grad_norm": 0.6037632401087822, + "learning_rate": 1.578691600184416e-06, + "loss": 0.2057, + "step": 950 + }, + { 
+ "epoch": 1.4813084112149533, + "grad_norm": 0.6046826777481786, + "learning_rate": 1.569780649661603e-06, + "loss": 0.2926, + "step": 951 + }, + { + "epoch": 1.482866043613707, + "grad_norm": 0.6848611609536812, + "learning_rate": 1.5608902339811765e-06, + "loss": 0.2328, + "step": 952 + }, + { + "epoch": 1.4844236760124612, + "grad_norm": 0.5462582273781476, + "learning_rate": 1.5520204063651784e-06, + "loss": 0.2703, + "step": 953 + }, + { + "epoch": 1.485981308411215, + "grad_norm": 0.5459862014182448, + "learning_rate": 1.5431712199124033e-06, + "loss": 0.2268, + "step": 954 + }, + { + "epoch": 1.4875389408099688, + "grad_norm": 0.5909228089011523, + "learning_rate": 1.5343427275980804e-06, + "loss": 0.1918, + "step": 955 + }, + { + "epoch": 1.4890965732087227, + "grad_norm": 0.6922392218824316, + "learning_rate": 1.5255349822735494e-06, + "loss": 0.2065, + "step": 956 + }, + { + "epoch": 1.4906542056074765, + "grad_norm": 0.6347770175659823, + "learning_rate": 1.5167480366659538e-06, + "loss": 0.1683, + "step": 957 + }, + { + "epoch": 1.4922118380062306, + "grad_norm": 0.5516070303093964, + "learning_rate": 1.507981943377918e-06, + "loss": 0.2173, + "step": 958 + }, + { + "epoch": 1.4937694704049844, + "grad_norm": 0.6071049706243482, + "learning_rate": 1.4992367548872383e-06, + "loss": 0.2458, + "step": 959 + }, + { + "epoch": 1.4953271028037383, + "grad_norm": 0.6720642183082266, + "learning_rate": 1.490512523546559e-06, + "loss": 0.1915, + "step": 960 + }, + { + "epoch": 1.4968847352024923, + "grad_norm": 0.5147991401453758, + "learning_rate": 1.481809301583072e-06, + "loss": 0.2315, + "step": 961 + }, + { + "epoch": 1.4984423676012462, + "grad_norm": 0.5193622138782028, + "learning_rate": 1.4731271410981961e-06, + "loss": 0.2994, + "step": 962 + }, + { + "epoch": 1.5, + "grad_norm": 0.6483302843412619, + "learning_rate": 1.4644660940672628e-06, + "loss": 0.1486, + "step": 963 + }, + { + "epoch": 1.5015576323987538, + "grad_norm": 0.6211856895290482, + "learning_rate": 1.4558262123392125e-06, + "loss": 0.1979, + "step": 964 + }, + { + "epoch": 1.5031152647975077, + "grad_norm": 0.6267158569891346, + "learning_rate": 1.4472075476362819e-06, + "loss": 0.226, + "step": 965 + }, + { + "epoch": 1.5046728971962615, + "grad_norm": 0.6878081812868871, + "learning_rate": 1.4386101515536865e-06, + "loss": 0.215, + "step": 966 + }, + { + "epoch": 1.5062305295950156, + "grad_norm": 0.6013152247422209, + "learning_rate": 1.4300340755593256e-06, + "loss": 0.1853, + "step": 967 + }, + { + "epoch": 1.5077881619937694, + "grad_norm": 0.5961333640534994, + "learning_rate": 1.4214793709934644e-06, + "loss": 0.2134, + "step": 968 + }, + { + "epoch": 1.5093457943925235, + "grad_norm": 0.6714185068729337, + "learning_rate": 1.4129460890684255e-06, + "loss": 0.2285, + "step": 969 + }, + { + "epoch": 1.5109034267912773, + "grad_norm": 0.5287976544223729, + "learning_rate": 1.4044342808682904e-06, + "loss": 0.226, + "step": 970 + }, + { + "epoch": 1.5124610591900312, + "grad_norm": 0.7244263964282488, + "learning_rate": 1.3959439973485872e-06, + "loss": 0.1919, + "step": 971 + }, + { + "epoch": 1.514018691588785, + "grad_norm": 0.7411473501185608, + "learning_rate": 1.38747528933599e-06, + "loss": 0.1573, + "step": 972 + }, + { + "epoch": 1.5155763239875388, + "grad_norm": 0.7535346088046625, + "learning_rate": 1.3790282075280064e-06, + "loss": 0.1939, + "step": 973 + }, + { + "epoch": 1.5171339563862927, + "grad_norm": 0.7535256340431424, + "learning_rate": 1.3706028024926855e-06, + "loss": 0.1736, + 
"step": 974 + }, + { + "epoch": 1.5186915887850467, + "grad_norm": 0.5709920085521653, + "learning_rate": 1.3621991246683093e-06, + "loss": 0.222, + "step": 975 + }, + { + "epoch": 1.5202492211838006, + "grad_norm": 0.7297081050093409, + "learning_rate": 1.3538172243630848e-06, + "loss": 0.1788, + "step": 976 + }, + { + "epoch": 1.5218068535825546, + "grad_norm": 0.6180657113624616, + "learning_rate": 1.3454571517548554e-06, + "loss": 0.177, + "step": 977 + }, + { + "epoch": 1.5233644859813085, + "grad_norm": 0.591364040153287, + "learning_rate": 1.3371189568907915e-06, + "loss": 0.2128, + "step": 978 + }, + { + "epoch": 1.5249221183800623, + "grad_norm": 0.5022261849806481, + "learning_rate": 1.3288026896870964e-06, + "loss": 0.2267, + "step": 979 + }, + { + "epoch": 1.5264797507788161, + "grad_norm": 0.6203532747393247, + "learning_rate": 1.3205083999286973e-06, + "loss": 0.1955, + "step": 980 + }, + { + "epoch": 1.52803738317757, + "grad_norm": 0.6729761565050358, + "learning_rate": 1.3122361372689624e-06, + "loss": 0.1657, + "step": 981 + }, + { + "epoch": 1.5295950155763238, + "grad_norm": 0.6536059366129146, + "learning_rate": 1.3039859512293885e-06, + "loss": 0.2198, + "step": 982 + }, + { + "epoch": 1.5311526479750779, + "grad_norm": 0.695539877357984, + "learning_rate": 1.2957578911993212e-06, + "loss": 0.2023, + "step": 983 + }, + { + "epoch": 1.5327102803738317, + "grad_norm": 0.4424609062997814, + "learning_rate": 1.28755200643564e-06, + "loss": 0.2238, + "step": 984 + }, + { + "epoch": 1.5342679127725858, + "grad_norm": 0.5843664425742057, + "learning_rate": 1.27936834606248e-06, + "loss": 0.2197, + "step": 985 + }, + { + "epoch": 1.5358255451713396, + "grad_norm": 0.6148367721669108, + "learning_rate": 1.2712069590709265e-06, + "loss": 0.2004, + "step": 986 + }, + { + "epoch": 1.5373831775700935, + "grad_norm": 0.562472309572901, + "learning_rate": 1.2630678943187292e-06, + "loss": 0.1835, + "step": 987 + }, + { + "epoch": 1.5389408099688473, + "grad_norm": 0.7987485306921659, + "learning_rate": 1.2549512005300068e-06, + "loss": 0.1995, + "step": 988 + }, + { + "epoch": 1.5404984423676011, + "grad_norm": 0.6656097374020112, + "learning_rate": 1.246856926294953e-06, + "loss": 0.1874, + "step": 989 + }, + { + "epoch": 1.542056074766355, + "grad_norm": 0.7982996720144556, + "learning_rate": 1.23878512006955e-06, + "loss": 0.351, + "step": 990 + }, + { + "epoch": 1.543613707165109, + "grad_norm": 0.7237322332450993, + "learning_rate": 1.2307358301752753e-06, + "loss": 0.6736, + "step": 991 + }, + { + "epoch": 1.5451713395638629, + "grad_norm": 0.6606058936315011, + "learning_rate": 1.222709104798816e-06, + "loss": 0.2446, + "step": 992 + }, + { + "epoch": 1.546728971962617, + "grad_norm": 0.7491652827965601, + "learning_rate": 1.214704991991773e-06, + "loss": 0.1912, + "step": 993 + }, + { + "epoch": 1.5482866043613708, + "grad_norm": 0.5316071822548012, + "learning_rate": 1.2067235396703819e-06, + "loss": 0.2006, + "step": 994 + }, + { + "epoch": 1.5498442367601246, + "grad_norm": 0.4870862919772457, + "learning_rate": 1.198764795615221e-06, + "loss": 0.1721, + "step": 995 + }, + { + "epoch": 1.5514018691588785, + "grad_norm": 0.45786723638909316, + "learning_rate": 1.190828807470929e-06, + "loss": 0.2847, + "step": 996 + }, + { + "epoch": 1.5529595015576323, + "grad_norm": 0.5220478168315765, + "learning_rate": 1.182915622745912e-06, + "loss": 0.1942, + "step": 997 + }, + { + "epoch": 1.5545171339563861, + "grad_norm": 1.0355443091689227, + "learning_rate": 
1.17502528881207e-06, + "loss": 0.6648, + "step": 998 + }, + { + "epoch": 1.5560747663551402, + "grad_norm": 0.6645917404798355, + "learning_rate": 1.1671578529045075e-06, + "loss": 0.1686, + "step": 999 + }, + { + "epoch": 1.557632398753894, + "grad_norm": 0.5772980090930082, + "learning_rate": 1.1593133621212454e-06, + "loss": 0.1601, + "step": 1000 + }, + { + "epoch": 1.557632398753894, + "eval_loss": 0.36974087357521057, + "eval_runtime": 2.7835, + "eval_samples_per_second": 9.341, + "eval_steps_per_second": 2.515, + "step": 1000 + }, + { + "epoch": 1.559190031152648, + "grad_norm": 0.6634169549696792, + "learning_rate": 1.151491863422951e-06, + "loss": 0.178, + "step": 1001 + }, + { + "epoch": 1.560747663551402, + "grad_norm": 0.6293088334490837, + "learning_rate": 1.1436934036326485e-06, + "loss": 0.2054, + "step": 1002 + }, + { + "epoch": 1.5623052959501558, + "grad_norm": 0.6633410484590091, + "learning_rate": 1.135918029435441e-06, + "loss": 0.2654, + "step": 1003 + }, + { + "epoch": 1.5638629283489096, + "grad_norm": 0.6622964143754947, + "learning_rate": 1.1281657873782287e-06, + "loss": 0.2793, + "step": 1004 + }, + { + "epoch": 1.5654205607476634, + "grad_norm": 0.6119616717415093, + "learning_rate": 1.1204367238694357e-06, + "loss": 0.2711, + "step": 1005 + }, + { + "epoch": 1.5669781931464173, + "grad_norm": 0.47949709495438925, + "learning_rate": 1.1127308851787244e-06, + "loss": 0.2021, + "step": 1006 + }, + { + "epoch": 1.5685358255451713, + "grad_norm": 0.6934647914219084, + "learning_rate": 1.1050483174367304e-06, + "loss": 0.203, + "step": 1007 + }, + { + "epoch": 1.5700934579439252, + "grad_norm": 0.5481726220781126, + "learning_rate": 1.0973890666347703e-06, + "loss": 0.2368, + "step": 1008 + }, + { + "epoch": 1.5716510903426792, + "grad_norm": 0.6509857110936647, + "learning_rate": 1.0897531786245819e-06, + "loss": 0.2125, + "step": 1009 + }, + { + "epoch": 1.573208722741433, + "grad_norm": 0.5617926695948265, + "learning_rate": 1.0821406991180367e-06, + "loss": 0.2398, + "step": 1010 + }, + { + "epoch": 1.574766355140187, + "grad_norm": 0.6111199895380683, + "learning_rate": 1.0745516736868766e-06, + "loss": 0.2485, + "step": 1011 + }, + { + "epoch": 1.5763239875389408, + "grad_norm": 0.7124396857915882, + "learning_rate": 1.0669861477624376e-06, + "loss": 0.2517, + "step": 1012 + }, + { + "epoch": 1.5778816199376946, + "grad_norm": 0.5990035454998449, + "learning_rate": 1.0594441666353722e-06, + "loss": 0.151, + "step": 1013 + }, + { + "epoch": 1.5794392523364484, + "grad_norm": 0.6520260923864126, + "learning_rate": 1.0519257754553868e-06, + "loss": 0.1888, + "step": 1014 + }, + { + "epoch": 1.5809968847352025, + "grad_norm": 0.7169934380455352, + "learning_rate": 1.0444310192309675e-06, + "loss": 0.2318, + "step": 1015 + }, + { + "epoch": 1.5825545171339563, + "grad_norm": 0.5993914903355098, + "learning_rate": 1.0369599428291116e-06, + "loss": 0.2124, + "step": 1016 + }, + { + "epoch": 1.5841121495327104, + "grad_norm": 0.49513044936794753, + "learning_rate": 1.0295125909750537e-06, + "loss": 0.2402, + "step": 1017 + }, + { + "epoch": 1.5856697819314642, + "grad_norm": 0.7226758243950167, + "learning_rate": 1.0220890082520074e-06, + "loss": 0.2398, + "step": 1018 + }, + { + "epoch": 1.587227414330218, + "grad_norm": 0.6288423009834784, + "learning_rate": 1.0146892391008929e-06, + "loss": 0.1948, + "step": 1019 + }, + { + "epoch": 1.588785046728972, + "grad_norm": 0.596604250578145, + "learning_rate": 1.0073133278200702e-06, + "loss": 0.232, + "step": 1020 + }, 
+ { + "epoch": 1.5903426791277258, + "grad_norm": 0.6135147035860976, + "learning_rate": 9.999613185650748e-07, + "loss": 0.2614, + "step": 1021 + }, + { + "epoch": 1.5919003115264796, + "grad_norm": 0.5404711008426897, + "learning_rate": 9.926332553483547e-07, + "loss": 0.1885, + "step": 1022 + }, + { + "epoch": 1.5934579439252337, + "grad_norm": 0.7031398847787177, + "learning_rate": 9.853291820390087e-07, + "loss": 0.2325, + "step": 1023 + }, + { + "epoch": 1.5950155763239875, + "grad_norm": 0.6348294111355727, + "learning_rate": 9.780491423625154e-07, + "loss": 0.2385, + "step": 1024 + }, + { + "epoch": 1.5965732087227416, + "grad_norm": 0.5521768442130486, + "learning_rate": 9.70793179900484e-07, + "loss": 0.1988, + "step": 1025 + }, + { + "epoch": 1.5981308411214954, + "grad_norm": 0.613980141544231, + "learning_rate": 9.63561338090378e-07, + "loss": 0.1027, + "step": 1026 + }, + { + "epoch": 1.5996884735202492, + "grad_norm": 0.7720687456198224, + "learning_rate": 9.563536602252749e-07, + "loss": 0.2905, + "step": 1027 + }, + { + "epoch": 1.601246105919003, + "grad_norm": 0.6319851908391722, + "learning_rate": 9.49170189453586e-07, + "loss": 0.2617, + "step": 1028 + }, + { + "epoch": 1.602803738317757, + "grad_norm": 0.6736383488893641, + "learning_rate": 9.420109687788148e-07, + "loss": 0.2641, + "step": 1029 + }, + { + "epoch": 1.6043613707165107, + "grad_norm": 0.6656334468054872, + "learning_rate": 9.348760410592855e-07, + "loss": 0.2171, + "step": 1030 + }, + { + "epoch": 1.6059190031152648, + "grad_norm": 0.6957076685070199, + "learning_rate": 9.277654490079035e-07, + "loss": 0.236, + "step": 1031 + }, + { + "epoch": 1.6074766355140186, + "grad_norm": 0.6288117317451123, + "learning_rate": 9.206792351918809e-07, + "loss": 0.1562, + "step": 1032 + }, + { + "epoch": 1.6090342679127727, + "grad_norm": 0.6616920441316214, + "learning_rate": 9.136174420324962e-07, + "loss": 0.218, + "step": 1033 + }, + { + "epoch": 1.6105919003115265, + "grad_norm": 0.6342551825259773, + "learning_rate": 9.065801118048312e-07, + "loss": 0.2252, + "step": 1034 + }, + { + "epoch": 1.6121495327102804, + "grad_norm": 0.5589154301426593, + "learning_rate": 8.995672866375237e-07, + "loss": 0.1551, + "step": 1035 + }, + { + "epoch": 1.6137071651090342, + "grad_norm": 0.4635682927354973, + "learning_rate": 8.925790085125152e-07, + "loss": 0.1833, + "step": 1036 + }, + { + "epoch": 1.615264797507788, + "grad_norm": 0.5543844541586774, + "learning_rate": 8.856153192647909e-07, + "loss": 0.2067, + "step": 1037 + }, + { + "epoch": 1.616822429906542, + "grad_norm": 0.6110967305529611, + "learning_rate": 8.786762605821419e-07, + "loss": 0.2331, + "step": 1038 + }, + { + "epoch": 1.618380062305296, + "grad_norm": 0.6930362377044883, + "learning_rate": 8.717618740049083e-07, + "loss": 0.2451, + "step": 1039 + }, + { + "epoch": 1.6199376947040498, + "grad_norm": 0.8137005439911986, + "learning_rate": 8.648722009257315e-07, + "loss": 0.1626, + "step": 1040 + }, + { + "epoch": 1.6214953271028039, + "grad_norm": 0.7004708747608873, + "learning_rate": 8.580072825893049e-07, + "loss": 0.1831, + "step": 1041 + }, + { + "epoch": 1.6230529595015577, + "grad_norm": 0.607273962654228, + "learning_rate": 8.511671600921306e-07, + "loss": 0.161, + "step": 1042 + }, + { + "epoch": 1.6246105919003115, + "grad_norm": 0.5132546282344246, + "learning_rate": 8.443518743822726e-07, + "loss": 0.2764, + "step": 1043 + }, + { + "epoch": 1.6261682242990654, + "grad_norm": 0.6893277741210059, + "learning_rate": 8.375614662591097e-07, + 
"loss": 0.1793, + "step": 1044 + }, + { + "epoch": 1.6277258566978192, + "grad_norm": 0.46872986327742755, + "learning_rate": 8.307959763730899e-07, + "loss": 0.1761, + "step": 1045 + }, + { + "epoch": 1.629283489096573, + "grad_norm": 0.5290839127332908, + "learning_rate": 8.240554452254929e-07, + "loss": 0.2329, + "step": 1046 + }, + { + "epoch": 1.6308411214953271, + "grad_norm": 0.6109039006881162, + "learning_rate": 8.173399131681831e-07, + "loss": 0.2121, + "step": 1047 + }, + { + "epoch": 1.632398753894081, + "grad_norm": 0.6091367601010639, + "learning_rate": 8.106494204033677e-07, + "loss": 0.1873, + "step": 1048 + }, + { + "epoch": 1.633956386292835, + "grad_norm": 0.5867126273845332, + "learning_rate": 8.039840069833594e-07, + "loss": 0.7795, + "step": 1049 + }, + { + "epoch": 1.6355140186915889, + "grad_norm": 0.5685700476106482, + "learning_rate": 7.973437128103306e-07, + "loss": 0.2459, + "step": 1050 + }, + { + "epoch": 1.6370716510903427, + "grad_norm": 0.5135498312661578, + "learning_rate": 7.907285776360863e-07, + "loss": 0.255, + "step": 1051 + }, + { + "epoch": 1.6386292834890965, + "grad_norm": 0.6667857649972504, + "learning_rate": 7.841386410618096e-07, + "loss": 0.2476, + "step": 1052 + }, + { + "epoch": 1.6401869158878504, + "grad_norm": 0.5960262952580946, + "learning_rate": 7.775739425378398e-07, + "loss": 0.2462, + "step": 1053 + }, + { + "epoch": 1.6417445482866042, + "grad_norm": 0.8148918825910239, + "learning_rate": 7.710345213634247e-07, + "loss": 0.2342, + "step": 1054 + }, + { + "epoch": 1.6433021806853583, + "grad_norm": 0.6117412041332031, + "learning_rate": 7.645204166864967e-07, + "loss": 0.1889, + "step": 1055 + }, + { + "epoch": 1.644859813084112, + "grad_norm": 0.6194608199292435, + "learning_rate": 7.580316675034255e-07, + "loss": 0.1807, + "step": 1056 + }, + { + "epoch": 1.6464174454828662, + "grad_norm": 0.9323712553721532, + "learning_rate": 7.515683126587975e-07, + "loss": 0.1879, + "step": 1057 + }, + { + "epoch": 1.64797507788162, + "grad_norm": 0.5493635025344051, + "learning_rate": 7.451303908451707e-07, + "loss": 0.2106, + "step": 1058 + }, + { + "epoch": 1.6495327102803738, + "grad_norm": 0.7158277912716375, + "learning_rate": 7.387179406028549e-07, + "loss": 0.2354, + "step": 1059 + }, + { + "epoch": 1.6510903426791277, + "grad_norm": 0.5571552725370883, + "learning_rate": 7.323310003196749e-07, + "loss": 0.2145, + "step": 1060 + }, + { + "epoch": 1.6526479750778815, + "grad_norm": 0.7538580273599934, + "learning_rate": 7.259696082307372e-07, + "loss": 0.2482, + "step": 1061 + }, + { + "epoch": 1.6542056074766354, + "grad_norm": 0.5327326528906995, + "learning_rate": 7.196338024182098e-07, + "loss": 0.2354, + "step": 1062 + }, + { + "epoch": 1.6557632398753894, + "grad_norm": 0.557287992992955, + "learning_rate": 7.133236208110878e-07, + "loss": 0.1693, + "step": 1063 + }, + { + "epoch": 1.6573208722741433, + "grad_norm": 0.5104215870170742, + "learning_rate": 7.070391011849698e-07, + "loss": 0.1717, + "step": 1064 + }, + { + "epoch": 1.6588785046728973, + "grad_norm": 0.5917806139945608, + "learning_rate": 7.007802811618258e-07, + "loss": 0.2085, + "step": 1065 + }, + { + "epoch": 1.6604361370716512, + "grad_norm": 0.6306946462467933, + "learning_rate": 6.945471982097818e-07, + "loss": 0.2145, + "step": 1066 + }, + { + "epoch": 1.661993769470405, + "grad_norm": 0.7196660102146798, + "learning_rate": 6.883398896428867e-07, + "loss": 0.2321, + "step": 1067 + }, + { + "epoch": 1.6635514018691588, + "grad_norm": 0.532187998071398, + 
"learning_rate": 6.821583926208947e-07, + "loss": 0.2366, + "step": 1068 + }, + { + "epoch": 1.6651090342679127, + "grad_norm": 0.5827926596867945, + "learning_rate": 6.760027441490369e-07, + "loss": 0.285, + "step": 1069 + }, + { + "epoch": 1.6666666666666665, + "grad_norm": 0.7057955872693081, + "learning_rate": 6.698729810778065e-07, + "loss": 0.6118, + "step": 1070 + }, + { + "epoch": 1.6682242990654206, + "grad_norm": 0.7121732240730015, + "learning_rate": 6.637691401027352e-07, + "loss": 0.8544, + "step": 1071 + }, + { + "epoch": 1.6697819314641744, + "grad_norm": 0.49184886655898746, + "learning_rate": 6.576912577641697e-07, + "loss": 0.2325, + "step": 1072 + }, + { + "epoch": 1.6713395638629285, + "grad_norm": 0.6760569677497837, + "learning_rate": 6.516393704470608e-07, + "loss": 0.8096, + "step": 1073 + }, + { + "epoch": 1.6728971962616823, + "grad_norm": 0.7478627429100672, + "learning_rate": 6.456135143807369e-07, + "loss": 0.1594, + "step": 1074 + }, + { + "epoch": 1.6744548286604362, + "grad_norm": 0.5379337402997096, + "learning_rate": 6.396137256386975e-07, + "loss": 0.1994, + "step": 1075 + }, + { + "epoch": 1.67601246105919, + "grad_norm": 0.5032927663415819, + "learning_rate": 6.336400401383857e-07, + "loss": 0.2132, + "step": 1076 + }, + { + "epoch": 1.6775700934579438, + "grad_norm": 0.47582414487022096, + "learning_rate": 6.276924936409829e-07, + "loss": 0.2478, + "step": 1077 + }, + { + "epoch": 1.6791277258566977, + "grad_norm": 0.6434383172973982, + "learning_rate": 6.217711217511857e-07, + "loss": 0.2143, + "step": 1078 + }, + { + "epoch": 1.6806853582554517, + "grad_norm": 0.6604513508530203, + "learning_rate": 6.158759599170045e-07, + "loss": 0.19, + "step": 1079 + }, + { + "epoch": 1.6822429906542056, + "grad_norm": 0.5696994691557421, + "learning_rate": 6.100070434295379e-07, + "loss": 0.2234, + "step": 1080 + }, + { + "epoch": 1.6838006230529596, + "grad_norm": 0.668728026470969, + "learning_rate": 6.041644074227738e-07, + "loss": 0.1627, + "step": 1081 + }, + { + "epoch": 1.6853582554517135, + "grad_norm": 0.5959783592005928, + "learning_rate": 5.983480868733666e-07, + "loss": 0.2239, + "step": 1082 + }, + { + "epoch": 1.6869158878504673, + "grad_norm": 0.7134998370022319, + "learning_rate": 5.925581166004407e-07, + "loss": 0.1782, + "step": 1083 + }, + { + "epoch": 1.6884735202492211, + "grad_norm": 0.5805335392567044, + "learning_rate": 5.867945312653733e-07, + "loss": 0.1725, + "step": 1084 + }, + { + "epoch": 1.690031152647975, + "grad_norm": 0.938179646578548, + "learning_rate": 5.810573653715884e-07, + "loss": 0.6935, + "step": 1085 + }, + { + "epoch": 1.6915887850467288, + "grad_norm": 0.6255817452393061, + "learning_rate": 5.753466532643531e-07, + "loss": 0.2057, + "step": 1086 + }, + { + "epoch": 1.6931464174454829, + "grad_norm": 0.6164273521801443, + "learning_rate": 5.696624291305692e-07, + "loss": 0.2354, + "step": 1087 + }, + { + "epoch": 1.6947040498442367, + "grad_norm": 0.6024358987476546, + "learning_rate": 5.64004726998571e-07, + "loss": 0.1628, + "step": 1088 + }, + { + "epoch": 1.6962616822429908, + "grad_norm": 0.583734062156969, + "learning_rate": 5.583735807379165e-07, + "loss": 0.258, + "step": 1089 + }, + { + "epoch": 1.6978193146417446, + "grad_norm": 0.5381732861463082, + "learning_rate": 5.527690240591927e-07, + "loss": 0.259, + "step": 1090 + }, + { + "epoch": 1.6993769470404985, + "grad_norm": 0.6432738953861825, + "learning_rate": 5.471910905138062e-07, + "loss": 0.2233, + "step": 1091 + }, + { + "epoch": 1.7009345794392523, + 
"grad_norm": 0.7405741920054726, + "learning_rate": 5.416398134937878e-07, + "loss": 0.2732, + "step": 1092 + }, + { + "epoch": 1.7024922118380061, + "grad_norm": 0.5681851173547261, + "learning_rate": 5.361152262315883e-07, + "loss": 0.1976, + "step": 1093 + }, + { + "epoch": 1.70404984423676, + "grad_norm": 0.5473658208153401, + "learning_rate": 5.306173617998823e-07, + "loss": 0.2139, + "step": 1094 + }, + { + "epoch": 1.705607476635514, + "grad_norm": 0.5255770004268006, + "learning_rate": 5.251462531113705e-07, + "loss": 0.1707, + "step": 1095 + }, + { + "epoch": 1.7071651090342679, + "grad_norm": 0.5374196236910275, + "learning_rate": 5.197019329185787e-07, + "loss": 0.2281, + "step": 1096 + }, + { + "epoch": 1.708722741433022, + "grad_norm": 0.5260272455503919, + "learning_rate": 5.142844338136693e-07, + "loss": 0.2471, + "step": 1097 + }, + { + "epoch": 1.7102803738317758, + "grad_norm": 0.5646648215018458, + "learning_rate": 5.088937882282358e-07, + "loss": 0.1951, + "step": 1098 + }, + { + "epoch": 1.7118380062305296, + "grad_norm": 0.5458034326374251, + "learning_rate": 5.035300284331213e-07, + "loss": 0.2432, + "step": 1099 + }, + { + "epoch": 1.7133956386292835, + "grad_norm": 0.6037542349280097, + "learning_rate": 4.981931865382111e-07, + "loss": 0.2618, + "step": 1100 + }, + { + "epoch": 1.7149532710280373, + "grad_norm": 0.5948181143891571, + "learning_rate": 4.92883294492254e-07, + "loss": 0.1635, + "step": 1101 + }, + { + "epoch": 1.7165109034267911, + "grad_norm": 0.5776080511589572, + "learning_rate": 4.876003840826588e-07, + "loss": 0.2451, + "step": 1102 + }, + { + "epoch": 1.7180685358255452, + "grad_norm": 0.5026789603534472, + "learning_rate": 4.823444869353177e-07, + "loss": 0.1562, + "step": 1103 + }, + { + "epoch": 1.719626168224299, + "grad_norm": 0.5367686440727194, + "learning_rate": 4.771156345144018e-07, + "loss": 0.2041, + "step": 1104 + }, + { + "epoch": 1.721183800623053, + "grad_norm": 0.658875304880407, + "learning_rate": 4.7191385812218547e-07, + "loss": 0.7385, + "step": 1105 + }, + { + "epoch": 1.722741433021807, + "grad_norm": 0.6189314088368518, + "learning_rate": 4.6673918889884985e-07, + "loss": 0.269, + "step": 1106 + }, + { + "epoch": 1.7242990654205608, + "grad_norm": 0.7000827321303451, + "learning_rate": 4.615916578223029e-07, + "loss": 0.219, + "step": 1107 + }, + { + "epoch": 1.7258566978193146, + "grad_norm": 0.6218605150183268, + "learning_rate": 4.5647129570799196e-07, + "loss": 0.2765, + "step": 1108 + }, + { + "epoch": 1.7274143302180685, + "grad_norm": 0.7349959501432963, + "learning_rate": 4.513781332087153e-07, + "loss": 0.2457, + "step": 1109 + }, + { + "epoch": 1.7289719626168223, + "grad_norm": 0.7008089188891375, + "learning_rate": 4.463122008144449e-07, + "loss": 0.2083, + "step": 1110 + }, + { + "epoch": 1.7305295950155763, + "grad_norm": 0.5607384923036217, + "learning_rate": 4.4127352885214026e-07, + "loss": 0.2166, + "step": 1111 + }, + { + "epoch": 1.7320872274143302, + "grad_norm": 0.645885067353582, + "learning_rate": 4.362621474855688e-07, + "loss": 0.2312, + "step": 1112 + }, + { + "epoch": 1.7336448598130842, + "grad_norm": 1.0208563939368382, + "learning_rate": 4.3127808671512114e-07, + "loss": 0.8117, + "step": 1113 + }, + { + "epoch": 1.735202492211838, + "grad_norm": 0.6252228702704483, + "learning_rate": 4.263213763776369e-07, + "loss": 0.2111, + "step": 1114 + }, + { + "epoch": 1.736760124610592, + "grad_norm": 0.5295697965398305, + "learning_rate": 4.213920461462234e-07, + "loss": 0.1693, + "step": 1115 + }, 
+ { + "epoch": 1.7383177570093458, + "grad_norm": 0.5321708104559851, + "learning_rate": 4.1649012553007795e-07, + "loss": 0.1598, + "step": 1116 + }, + { + "epoch": 1.7398753894080996, + "grad_norm": 0.6297092340917954, + "learning_rate": 4.116156438743102e-07, + "loss": 0.6563, + "step": 1117 + }, + { + "epoch": 1.7414330218068534, + "grad_norm": 0.6502646751207796, + "learning_rate": 4.067686303597701e-07, + "loss": 0.828, + "step": 1118 + }, + { + "epoch": 1.7429906542056075, + "grad_norm": 0.6299291575701418, + "learning_rate": 4.0194911400287084e-07, + "loss": 0.1618, + "step": 1119 + }, + { + "epoch": 1.7445482866043613, + "grad_norm": 0.6763449045114679, + "learning_rate": 3.971571236554117e-07, + "loss": 0.1755, + "step": 1120 + }, + { + "epoch": 1.7461059190031154, + "grad_norm": 0.6097387148482865, + "learning_rate": 3.923926880044132e-07, + "loss": 0.2144, + "step": 1121 + }, + { + "epoch": 1.7476635514018692, + "grad_norm": 0.6555185956249271, + "learning_rate": 3.876558355719373e-07, + "loss": 0.7322, + "step": 1122 + }, + { + "epoch": 1.749221183800623, + "grad_norm": 0.529186428154948, + "learning_rate": 3.8294659471492466e-07, + "loss": 0.2299, + "step": 1123 + }, + { + "epoch": 1.750778816199377, + "grad_norm": 0.5210926587424783, + "learning_rate": 3.7826499362501657e-07, + "loss": 0.7365, + "step": 1124 + }, + { + "epoch": 1.7523364485981308, + "grad_norm": 0.5705242040491778, + "learning_rate": 3.7361106032839266e-07, + "loss": 0.1769, + "step": 1125 + }, + { + "epoch": 1.7538940809968846, + "grad_norm": 0.6221568867702569, + "learning_rate": 3.6898482268559745e-07, + "loss": 0.2641, + "step": 1126 + }, + { + "epoch": 1.7554517133956387, + "grad_norm": 0.5761942177039651, + "learning_rate": 3.643863083913829e-07, + "loss": 0.1756, + "step": 1127 + }, + { + "epoch": 1.7570093457943925, + "grad_norm": 0.5598040118881634, + "learning_rate": 3.5981554497452886e-07, + "loss": 0.1968, + "step": 1128 + }, + { + "epoch": 1.7585669781931466, + "grad_norm": 0.6115168676953736, + "learning_rate": 3.552725597976925e-07, + "loss": 0.1656, + "step": 1129 + }, + { + "epoch": 1.7601246105919004, + "grad_norm": 0.5618155080059914, + "learning_rate": 3.507573800572328e-07, + "loss": 0.2284, + "step": 1130 + }, + { + "epoch": 1.7616822429906542, + "grad_norm": 0.5746401068264577, + "learning_rate": 3.462700327830559e-07, + "loss": 0.2942, + "step": 1131 + }, + { + "epoch": 1.763239875389408, + "grad_norm": 0.5812252313415237, + "learning_rate": 3.418105448384507e-07, + "loss": 0.2478, + "step": 1132 + }, + { + "epoch": 1.764797507788162, + "grad_norm": 0.5205912713872596, + "learning_rate": 3.3737894291992426e-07, + "loss": 0.1793, + "step": 1133 + }, + { + "epoch": 1.7663551401869158, + "grad_norm": 0.59827648712384, + "learning_rate": 3.32975253557048e-07, + "loss": 0.1643, + "step": 1134 + }, + { + "epoch": 1.7679127725856698, + "grad_norm": 0.5476307417528222, + "learning_rate": 3.2859950311229625e-07, + "loss": 0.2551, + "step": 1135 + }, + { + "epoch": 1.7694704049844237, + "grad_norm": 0.5912924193469346, + "learning_rate": 3.242517177808874e-07, + "loss": 0.2958, + "step": 1136 + }, + { + "epoch": 1.7710280373831777, + "grad_norm": 0.6149100468721329, + "learning_rate": 3.199319235906273e-07, + "loss": 0.2391, + "step": 1137 + }, + { + "epoch": 1.7725856697819315, + "grad_norm": 0.5135930360411095, + "learning_rate": 3.1564014640175613e-07, + "loss": 0.1651, + "step": 1138 + }, + { + "epoch": 1.7741433021806854, + "grad_norm": 0.545428012439618, + "learning_rate": 
3.1137641190678967e-07, + "loss": 0.1866, + "step": 1139 + }, + { + "epoch": 1.7757009345794392, + "grad_norm": 0.7534351814752701, + "learning_rate": 3.0714074563037043e-07, + "loss": 0.1752, + "step": 1140 + }, + { + "epoch": 1.777258566978193, + "grad_norm": 0.6368630911991421, + "learning_rate": 3.029331729291074e-07, + "loss": 0.2049, + "step": 1141 + }, + { + "epoch": 1.778816199376947, + "grad_norm": 0.594879170975228, + "learning_rate": 2.9875371899143246e-07, + "loss": 0.6515, + "step": 1142 + }, + { + "epoch": 1.780373831775701, + "grad_norm": 0.5107491983811178, + "learning_rate": 2.9460240883744496e-07, + "loss": 0.1768, + "step": 1143 + }, + { + "epoch": 1.7819314641744548, + "grad_norm": 0.5417424283009235, + "learning_rate": 2.9047926731876177e-07, + "loss": 0.2087, + "step": 1144 + }, + { + "epoch": 1.7834890965732089, + "grad_norm": 0.6798250278910997, + "learning_rate": 2.8638431911837126e-07, + "loss": 0.28, + "step": 1145 + }, + { + "epoch": 1.7850467289719627, + "grad_norm": 1.0039697477055525, + "learning_rate": 2.8231758875048087e-07, + "loss": 0.2178, + "step": 1146 + }, + { + "epoch": 1.7866043613707165, + "grad_norm": 0.6262879884675681, + "learning_rate": 2.7827910056037857e-07, + "loss": 0.1775, + "step": 1147 + }, + { + "epoch": 1.7881619937694704, + "grad_norm": 0.6215897703245564, + "learning_rate": 2.742688787242764e-07, + "loss": 0.1726, + "step": 1148 + }, + { + "epoch": 1.7897196261682242, + "grad_norm": 0.7119013996583601, + "learning_rate": 2.702869472491748e-07, + "loss": 0.6972, + "step": 1149 + }, + { + "epoch": 1.791277258566978, + "grad_norm": 0.5300668663016045, + "learning_rate": 2.6633332997271277e-07, + "loss": 0.1797, + "step": 1150 + }, + { + "epoch": 1.7928348909657321, + "grad_norm": 0.6010057607669876, + "learning_rate": 2.624080505630311e-07, + "loss": 0.2206, + "step": 1151 + }, + { + "epoch": 1.794392523364486, + "grad_norm": 0.5566227596606641, + "learning_rate": 2.585111325186235e-07, + "loss": 0.2991, + "step": 1152 + }, + { + "epoch": 1.79595015576324, + "grad_norm": 0.7373120599532028, + "learning_rate": 2.5464259916820276e-07, + "loss": 0.2155, + "step": 1153 + }, + { + "epoch": 1.7975077881619939, + "grad_norm": 0.5444732937470245, + "learning_rate": 2.508024736705561e-07, + "loss": 0.2394, + "step": 1154 + }, + { + "epoch": 1.7990654205607477, + "grad_norm": 0.6187155054450917, + "learning_rate": 2.469907790144088e-07, + "loss": 0.1835, + "step": 1155 + }, + { + "epoch": 1.8006230529595015, + "grad_norm": 0.45905420748321907, + "learning_rate": 2.4320753801828853e-07, + "loss": 0.1604, + "step": 1156 + }, + { + "epoch": 1.8021806853582554, + "grad_norm": 0.43879992123704614, + "learning_rate": 2.394527733303831e-07, + "loss": 0.1723, + "step": 1157 + }, + { + "epoch": 1.8037383177570092, + "grad_norm": 0.5693210977954104, + "learning_rate": 2.3572650742840985e-07, + "loss": 0.1148, + "step": 1158 + }, + { + "epoch": 1.8052959501557633, + "grad_norm": 0.5824148680871707, + "learning_rate": 2.3202876261948004e-07, + "loss": 0.2304, + "step": 1159 + }, + { + "epoch": 1.8068535825545171, + "grad_norm": 0.692163156120945, + "learning_rate": 2.2835956103996525e-07, + "loss": 0.1877, + "step": 1160 + }, + { + "epoch": 1.8084112149532712, + "grad_norm": 0.627351603757966, + "learning_rate": 2.247189246553616e-07, + "loss": 0.2133, + "step": 1161 + }, + { + "epoch": 1.809968847352025, + "grad_norm": 0.6447153050268107, + "learning_rate": 2.2110687526016418e-07, + "loss": 0.1753, + "step": 1162 + }, + { + "epoch": 1.8115264797507789, + 
"grad_norm": 0.6498742455536043, + "learning_rate": 2.1752343447773105e-07, + "loss": 0.2801, + "step": 1163 + }, + { + "epoch": 1.8130841121495327, + "grad_norm": 0.5556829005135521, + "learning_rate": 2.1396862376015904e-07, + "loss": 0.2613, + "step": 1164 + }, + { + "epoch": 1.8146417445482865, + "grad_norm": 0.5643429928213758, + "learning_rate": 2.1044246438814918e-07, + "loss": 0.2529, + "step": 1165 + }, + { + "epoch": 1.8161993769470404, + "grad_norm": 0.747941354496335, + "learning_rate": 2.0694497747088428e-07, + "loss": 0.2321, + "step": 1166 + }, + { + "epoch": 1.8177570093457944, + "grad_norm": 0.6102806034003578, + "learning_rate": 2.034761839459015e-07, + "loss": 0.218, + "step": 1167 + }, + { + "epoch": 1.8193146417445483, + "grad_norm": 0.7249346044063573, + "learning_rate": 2.0003610457896506e-07, + "loss": 0.2317, + "step": 1168 + }, + { + "epoch": 1.8208722741433023, + "grad_norm": 0.5858879975789372, + "learning_rate": 1.966247599639448e-07, + "loss": 0.1886, + "step": 1169 + }, + { + "epoch": 1.8224299065420562, + "grad_norm": 0.458859760014875, + "learning_rate": 1.9324217052268835e-07, + "loss": 0.2037, + "step": 1170 + }, + { + "epoch": 1.82398753894081, + "grad_norm": 0.6009222197974706, + "learning_rate": 1.8988835650490645e-07, + "loss": 0.1845, + "step": 1171 + }, + { + "epoch": 1.8255451713395638, + "grad_norm": 0.7980290452291322, + "learning_rate": 1.865633379880427e-07, + "loss": 0.2341, + "step": 1172 + }, + { + "epoch": 1.8271028037383177, + "grad_norm": 0.5432836689095761, + "learning_rate": 1.832671348771614e-07, + "loss": 0.2282, + "step": 1173 + }, + { + "epoch": 1.8286604361370715, + "grad_norm": 0.628743562125201, + "learning_rate": 1.799997669048209e-07, + "loss": 0.261, + "step": 1174 + }, + { + "epoch": 1.8302180685358256, + "grad_norm": 0.5832048925707848, + "learning_rate": 1.7676125363096431e-07, + "loss": 0.2157, + "step": 1175 + }, + { + "epoch": 1.8317757009345794, + "grad_norm": 0.6281683541540782, + "learning_rate": 1.7355161444279346e-07, + "loss": 0.2799, + "step": 1176 + }, + { + "epoch": 1.8333333333333335, + "grad_norm": 0.6990945601104308, + "learning_rate": 1.7037086855465902e-07, + "loss": 0.2248, + "step": 1177 + }, + { + "epoch": 1.8348909657320873, + "grad_norm": 0.48096314961131165, + "learning_rate": 1.6721903500794112e-07, + "loss": 0.2014, + "step": 1178 + }, + { + "epoch": 1.8364485981308412, + "grad_norm": 0.6720925885316696, + "learning_rate": 1.6409613267094006e-07, + "loss": 0.1839, + "step": 1179 + }, + { + "epoch": 1.838006230529595, + "grad_norm": 0.5023236174498488, + "learning_rate": 1.6100218023876013e-07, + "loss": 0.1729, + "step": 1180 + }, + { + "epoch": 1.8395638629283488, + "grad_norm": 0.6472364425599902, + "learning_rate": 1.5793719623319713e-07, + "loss": 0.2147, + "step": 1181 + }, + { + "epoch": 1.8411214953271027, + "grad_norm": 0.5750750734641096, + "learning_rate": 1.5490119900263e-07, + "loss": 0.1734, + "step": 1182 + }, + { + "epoch": 1.8426791277258567, + "grad_norm": 0.640400103725089, + "learning_rate": 1.5189420672190924e-07, + "loss": 0.2149, + "step": 1183 + }, + { + "epoch": 1.8442367601246106, + "grad_norm": 0.6159833044318871, + "learning_rate": 1.489162373922498e-07, + "loss": 0.2426, + "step": 1184 + }, + { + "epoch": 1.8457943925233646, + "grad_norm": 0.5891969287939168, + "learning_rate": 1.4596730884112008e-07, + "loss": 0.2164, + "step": 1185 + }, + { + "epoch": 1.8473520249221185, + "grad_norm": 0.6060235472721057, + "learning_rate": 1.4304743872213868e-07, + "loss": 0.2567, + 
"step": 1186 + }, + { + "epoch": 1.8489096573208723, + "grad_norm": 0.6985533236531171, + "learning_rate": 1.4015664451496713e-07, + "loss": 0.1688, + "step": 1187 + }, + { + "epoch": 1.8504672897196262, + "grad_norm": 0.5859326752612632, + "learning_rate": 1.372949435252058e-07, + "loss": 0.2311, + "step": 1188 + }, + { + "epoch": 1.85202492211838, + "grad_norm": 0.7294577853076925, + "learning_rate": 1.3446235288428867e-07, + "loss": 0.2023, + "step": 1189 + }, + { + "epoch": 1.8535825545171338, + "grad_norm": 0.6888020410981194, + "learning_rate": 1.31658889549382e-07, + "loss": 0.2295, + "step": 1190 + }, + { + "epoch": 1.855140186915888, + "grad_norm": 0.6480785185138133, + "learning_rate": 1.288845703032854e-07, + "loss": 0.2359, + "step": 1191 + }, + { + "epoch": 1.8566978193146417, + "grad_norm": 0.7015546454899397, + "learning_rate": 1.2613941175432577e-07, + "loss": 0.2293, + "step": 1192 + }, + { + "epoch": 1.8582554517133958, + "grad_norm": 0.5363209042476926, + "learning_rate": 1.2342343033626248e-07, + "loss": 0.18, + "step": 1193 + }, + { + "epoch": 1.8598130841121496, + "grad_norm": 0.6459401688040521, + "learning_rate": 1.2073664230818571e-07, + "loss": 0.1598, + "step": 1194 + }, + { + "epoch": 1.8613707165109035, + "grad_norm": 0.638563320329426, + "learning_rate": 1.180790637544249e-07, + "loss": 0.2476, + "step": 1195 + }, + { + "epoch": 1.8629283489096573, + "grad_norm": 0.6002853300206833, + "learning_rate": 1.1545071058444324e-07, + "loss": 0.2199, + "step": 1196 + }, + { + "epoch": 1.8644859813084111, + "grad_norm": 0.5764195151127395, + "learning_rate": 1.1285159853275107e-07, + "loss": 0.1227, + "step": 1197 + }, + { + "epoch": 1.866043613707165, + "grad_norm": 0.6123103816053441, + "learning_rate": 1.1028174315880657e-07, + "loss": 0.815, + "step": 1198 + }, + { + "epoch": 1.867601246105919, + "grad_norm": 0.8123905890394445, + "learning_rate": 1.0774115984692523e-07, + "loss": 0.7364, + "step": 1199 + }, + { + "epoch": 1.8691588785046729, + "grad_norm": 0.5718174425382163, + "learning_rate": 1.0522986380618606e-07, + "loss": 0.2227, + "step": 1200 + }, + { + "epoch": 1.870716510903427, + "grad_norm": 0.5280345967170433, + "learning_rate": 1.0274787007034215e-07, + "loss": 0.232, + "step": 1201 + }, + { + "epoch": 1.8722741433021808, + "grad_norm": 0.6572165953398976, + "learning_rate": 1.0029519349772754e-07, + "loss": 0.2738, + "step": 1202 + }, + { + "epoch": 1.8738317757009346, + "grad_norm": 0.5244811712323645, + "learning_rate": 9.787184877117328e-08, + "loss": 0.2594, + "step": 1203 + }, + { + "epoch": 1.8753894080996885, + "grad_norm": 0.6676225172095591, + "learning_rate": 9.547785039791535e-08, + "loss": 0.1547, + "step": 1204 + }, + { + "epoch": 1.8769470404984423, + "grad_norm": 0.5476881051496193, + "learning_rate": 9.311321270950801e-08, + "loss": 0.2128, + "step": 1205 + }, + { + "epoch": 1.8785046728971961, + "grad_norm": 0.5345787062303978, + "learning_rate": 9.077794986174226e-08, + "loss": 0.229, + "step": 1206 + }, + { + "epoch": 1.8800623052959502, + "grad_norm": 0.6008670505158255, + "learning_rate": 8.84720758345553e-08, + "loss": 0.2073, + "step": 1207 + }, + { + "epoch": 1.881619937694704, + "grad_norm": 0.6417543491732723, + "learning_rate": 8.619560443195174e-08, + "loss": 0.2276, + "step": 1208 + }, + { + "epoch": 1.883177570093458, + "grad_norm": 0.5552051709339759, + "learning_rate": 8.394854928191587e-08, + "loss": 0.2693, + "step": 1209 + }, + { + "epoch": 1.884735202492212, + "grad_norm": 0.6694794375263459, + "learning_rate": 
8.173092383633563e-08, + "loss": 0.1767, + "step": 1210 + }, + { + "epoch": 1.8862928348909658, + "grad_norm": 0.5551294915003306, + "learning_rate": 7.954274137091877e-08, + "loss": 0.147, + "step": 1211 + }, + { + "epoch": 1.8878504672897196, + "grad_norm": 0.534343522912461, + "learning_rate": 7.738401498511406e-08, + "loss": 0.1891, + "step": 1212 + }, + { + "epoch": 1.8894080996884735, + "grad_norm": 0.6889315198186039, + "learning_rate": 7.525475760203239e-08, + "loss": 0.2832, + "step": 1213 + }, + { + "epoch": 1.8909657320872273, + "grad_norm": 0.6872767665873238, + "learning_rate": 7.315498196836968e-08, + "loss": 0.1472, + "step": 1214 + }, + { + "epoch": 1.8925233644859814, + "grad_norm": 0.6612078267914279, + "learning_rate": 7.108470065433193e-08, + "loss": 0.2704, + "step": 1215 + }, + { + "epoch": 1.8940809968847352, + "grad_norm": 0.560144578359172, + "learning_rate": 6.904392605355803e-08, + "loss": 0.2943, + "step": 1216 + }, + { + "epoch": 1.8956386292834893, + "grad_norm": 0.590357610353342, + "learning_rate": 6.703267038304706e-08, + "loss": 0.229, + "step": 1217 + }, + { + "epoch": 1.897196261682243, + "grad_norm": 0.496234632076301, + "learning_rate": 6.505094568308223e-08, + "loss": 0.2807, + "step": 1218 + }, + { + "epoch": 1.898753894080997, + "grad_norm": 0.6744039849106092, + "learning_rate": 6.309876381716429e-08, + "loss": 0.1744, + "step": 1219 + }, + { + "epoch": 1.9003115264797508, + "grad_norm": 0.6282540133425786, + "learning_rate": 6.117613647193543e-08, + "loss": 0.1896, + "step": 1220 + }, + { + "epoch": 1.9018691588785046, + "grad_norm": 0.545913837904785, + "learning_rate": 5.928307515711107e-08, + "loss": 0.1798, + "step": 1221 + }, + { + "epoch": 1.9034267912772584, + "grad_norm": 0.5750165821891594, + "learning_rate": 5.741959120541096e-08, + "loss": 0.2328, + "step": 1222 + }, + { + "epoch": 1.9049844236760125, + "grad_norm": 0.6138340557718204, + "learning_rate": 5.558569577249207e-08, + "loss": 0.1179, + "step": 1223 + }, + { + "epoch": 1.9065420560747663, + "grad_norm": 0.5250776885173793, + "learning_rate": 5.378139983688135e-08, + "loss": 0.178, + "step": 1224 + }, + { + "epoch": 1.9080996884735204, + "grad_norm": 0.5353000767702344, + "learning_rate": 5.2006714199908106e-08, + "loss": 0.1881, + "step": 1225 + }, + { + "epoch": 1.9096573208722742, + "grad_norm": 0.6835275834635214, + "learning_rate": 5.026164948564283e-08, + "loss": 0.2075, + "step": 1226 + }, + { + "epoch": 1.911214953271028, + "grad_norm": 0.48910598318494225, + "learning_rate": 4.854621614083122e-08, + "loss": 0.1864, + "step": 1227 + }, + { + "epoch": 1.912772585669782, + "grad_norm": 0.5349299444820935, + "learning_rate": 4.686042443483196e-08, + "loss": 0.297, + "step": 1228 + }, + { + "epoch": 1.9143302180685358, + "grad_norm": 0.42493561251864087, + "learning_rate": 4.520428445955571e-08, + "loss": 0.1656, + "step": 1229 + }, + { + "epoch": 1.9158878504672896, + "grad_norm": 0.5786034487132414, + "learning_rate": 4.357780612940343e-08, + "loss": 0.2243, + "step": 1230 + }, + { + "epoch": 1.9174454828660437, + "grad_norm": 0.6628360349913834, + "learning_rate": 4.1980999181210344e-08, + "loss": 0.237, + "step": 1231 + }, + { + "epoch": 1.9190031152647975, + "grad_norm": 0.5726232559997532, + "learning_rate": 4.041387317418377e-08, + "loss": 0.19, + "step": 1232 + }, + { + "epoch": 1.9205607476635516, + "grad_norm": 0.5957569462038291, + "learning_rate": 3.88764374898476e-08, + "loss": 0.1922, + "step": 1233 + }, + { + "epoch": 1.9221183800623054, + "grad_norm": 
0.5981083547680797, + "learning_rate": 3.736870133198622e-08, + "loss": 0.1861, + "step": 1234 + }, + { + "epoch": 1.9236760124610592, + "grad_norm": 0.5039034286835927, + "learning_rate": 3.589067372658961e-08, + "loss": 0.1587, + "step": 1235 + }, + { + "epoch": 1.925233644859813, + "grad_norm": 0.5946390323111295, + "learning_rate": 3.444236352179831e-08, + "loss": 0.1658, + "step": 1236 + }, + { + "epoch": 1.926791277258567, + "grad_norm": 0.595459693543413, + "learning_rate": 3.302377938785128e-08, + "loss": 0.1509, + "step": 1237 + }, + { + "epoch": 1.9283489096573208, + "grad_norm": 0.6496361440227659, + "learning_rate": 3.163492981703431e-08, + "loss": 0.693, + "step": 1238 + }, + { + "epoch": 1.9299065420560748, + "grad_norm": 0.6473442377574977, + "learning_rate": 3.027582312362776e-08, + "loss": 0.2082, + "step": 1239 + }, + { + "epoch": 1.9314641744548287, + "grad_norm": 0.6541508019021366, + "learning_rate": 2.8946467443858873e-08, + "loss": 0.1911, + "step": 1240 + }, + { + "epoch": 1.9330218068535827, + "grad_norm": 0.5343926005691645, + "learning_rate": 2.764687073585015e-08, + "loss": 0.1724, + "step": 1241 + }, + { + "epoch": 1.9345794392523366, + "grad_norm": 0.6477434345058528, + "learning_rate": 2.6377040779574924e-08, + "loss": 0.2109, + "step": 1242 + }, + { + "epoch": 1.9361370716510904, + "grad_norm": 0.4788618278691479, + "learning_rate": 2.5136985176809625e-08, + "loss": 0.2257, + "step": 1243 + }, + { + "epoch": 1.9376947040498442, + "grad_norm": 0.7161454071348646, + "learning_rate": 2.3926711351086594e-08, + "loss": 0.2201, + "step": 1244 + }, + { + "epoch": 1.939252336448598, + "grad_norm": 0.6162622985277744, + "learning_rate": 2.2746226547651352e-08, + "loss": 0.2353, + "step": 1245 + }, + { + "epoch": 1.940809968847352, + "grad_norm": 0.6572303875322422, + "learning_rate": 2.1595537833419276e-08, + "loss": 0.1238, + "step": 1246 + }, + { + "epoch": 1.942367601246106, + "grad_norm": 0.4905959486856584, + "learning_rate": 2.047465209693289e-08, + "loss": 0.1913, + "step": 1247 + }, + { + "epoch": 1.9439252336448598, + "grad_norm": 0.6262698157072888, + "learning_rate": 1.9383576048320752e-08, + "loss": 0.2135, + "step": 1248 + }, + { + "epoch": 1.9454828660436139, + "grad_norm": 0.5977256427085367, + "learning_rate": 1.8322316219256396e-08, + "loss": 0.3103, + "step": 1249 + }, + { + "epoch": 1.9470404984423677, + "grad_norm": 0.5597696638121235, + "learning_rate": 1.7290878962920587e-08, + "loss": 0.2003, + "step": 1250 + }, + { + "epoch": 1.9485981308411215, + "grad_norm": 0.7414334515835818, + "learning_rate": 1.6289270453963e-08, + "loss": 0.133, + "step": 1251 + }, + { + "epoch": 1.9501557632398754, + "grad_norm": 0.6577156096873663, + "learning_rate": 1.531749668846394e-08, + "loss": 0.1938, + "step": 1252 + }, + { + "epoch": 1.9517133956386292, + "grad_norm": 0.44233229164345883, + "learning_rate": 1.4375563483901011e-08, + "loss": 0.2196, + "step": 1253 + }, + { + "epoch": 1.953271028037383, + "grad_norm": 0.5494650237391789, + "learning_rate": 1.3463476479110837e-08, + "loss": 0.239, + "step": 1254 + }, + { + "epoch": 1.9548286604361371, + "grad_norm": 0.4597714878638595, + "learning_rate": 1.2581241134258515e-08, + "loss": 0.1569, + "step": 1255 + }, + { + "epoch": 1.956386292834891, + "grad_norm": 0.5344018379047227, + "learning_rate": 1.1728862730803759e-08, + "loss": 0.2506, + "step": 1256 + }, + { + "epoch": 1.957943925233645, + "grad_norm": 0.6170756968114266, + "learning_rate": 1.0906346371468147e-08, + "loss": 0.1756, + "step": 1257 + }, + { 
+ "epoch": 1.9595015576323989, + "grad_norm": 0.7298638061579327, + "learning_rate": 1.011369698020681e-08, + "loss": 1.2315, + "step": 1258 + }, + { + "epoch": 1.9610591900311527, + "grad_norm": 0.7726214050156128, + "learning_rate": 9.350919302176242e-09, + "loss": 0.2132, + "step": 1259 + }, + { + "epoch": 1.9626168224299065, + "grad_norm": 0.7662101654548159, + "learning_rate": 8.618017903708198e-09, + "loss": 0.1979, + "step": 1260 + }, + { + "epoch": 1.9641744548286604, + "grad_norm": 0.7759433714405526, + "learning_rate": 7.914997172282502e-09, + "loss": 0.7825, + "step": 1261 + }, + { + "epoch": 1.9657320872274142, + "grad_norm": 0.5561454562523979, + "learning_rate": 7.241861316497623e-09, + "loss": 0.1787, + "step": 1262 + }, + { + "epoch": 1.9672897196261683, + "grad_norm": 0.6088898793952118, + "learning_rate": 6.598614366049027e-09, + "loss": 0.1525, + "step": 1263 + }, + { + "epoch": 1.9688473520249221, + "grad_norm": 0.6388680112674057, + "learning_rate": 5.985260171704199e-09, + "loss": 0.2538, + "step": 1264 + }, + { + "epoch": 1.9704049844236762, + "grad_norm": 1.2537539847150645, + "learning_rate": 5.401802405277656e-09, + "loss": 0.208, + "step": 1265 + }, + { + "epoch": 1.97196261682243, + "grad_norm": 0.5173311007016309, + "learning_rate": 4.848244559610971e-09, + "loss": 0.1927, + "step": 1266 + }, + { + "epoch": 1.9735202492211839, + "grad_norm": 0.7196153604341586, + "learning_rate": 4.324589948552227e-09, + "loss": 0.1292, + "step": 1267 + }, + { + "epoch": 1.9750778816199377, + "grad_norm": 0.8257531526111838, + "learning_rate": 3.830841706934374e-09, + "loss": 0.2268, + "step": 1268 + }, + { + "epoch": 1.9766355140186915, + "grad_norm": 0.6450834847941774, + "learning_rate": 3.367002790556906e-09, + "loss": 0.8063, + "step": 1269 + }, + { + "epoch": 1.9781931464174454, + "grad_norm": 0.5873649988075496, + "learning_rate": 2.9330759761692086e-09, + "loss": 0.1333, + "step": 1270 + }, + { + "epoch": 1.9797507788161994, + "grad_norm": 0.5338211522410232, + "learning_rate": 2.5290638614533516e-09, + "loss": 0.1651, + "step": 1271 + }, + { + "epoch": 1.9813084112149533, + "grad_norm": 0.5778154838677994, + "learning_rate": 2.154968865007989e-09, + "loss": 0.1375, + "step": 1272 + }, + { + "epoch": 1.9828660436137073, + "grad_norm": 0.6159025357834766, + "learning_rate": 1.8107932263350392e-09, + "loss": 0.2551, + "step": 1273 + }, + { + "epoch": 1.9844236760124612, + "grad_norm": 0.5276126425838753, + "learning_rate": 1.4965390058258035e-09, + "loss": 0.2857, + "step": 1274 + }, + { + "epoch": 1.985981308411215, + "grad_norm": 0.578702087814479, + "learning_rate": 1.2122080847470907e-09, + "loss": 0.1644, + "step": 1275 + }, + { + "epoch": 1.9875389408099688, + "grad_norm": 0.6156843300297827, + "learning_rate": 9.578021652323356e-10, + "loss": 0.203, + "step": 1276 + }, + { + "epoch": 1.9890965732087227, + "grad_norm": 0.5234412510622443, + "learning_rate": 7.3332277027105e-10, + "loss": 0.2086, + "step": 1277 + }, + { + "epoch": 1.9906542056074765, + "grad_norm": 0.6889557509874759, + "learning_rate": 5.38771243697167e-10, + "loss": 0.2412, + "step": 1278 + }, + { + "epoch": 1.9922118380062306, + "grad_norm": 0.5571525262541802, + "learning_rate": 3.7414875018515483e-10, + "loss": 0.314, + "step": 1279 + }, + { + "epoch": 1.9937694704049844, + "grad_norm": 0.48476554367462193, + "learning_rate": 2.3945627523891403e-10, + "loss": 0.1878, + "step": 1280 + }, + { + "epoch": 1.9953271028037385, + "grad_norm": 0.5759369634737167, + "learning_rate": 
1.3469462518844733e-10, + "loss": 0.181, + "step": 1281 + }, + { + "epoch": 1.9968847352024923, + "grad_norm": 0.6640918787171276, + "learning_rate": 5.986442718486363e-11, + "loss": 0.1886, + "step": 1282 + }, + { + "epoch": 1.9984423676012462, + "grad_norm": 0.6150458129744069, + "learning_rate": 1.496612919482665e-11, + "loss": 0.2397, + "step": 1283 + }, + { + "epoch": 2.0, + "grad_norm": 0.6727485887005468, + "learning_rate": 0.0, + "loss": 0.2332, + "step": 1284 + }, + { + "epoch": 2.0, + "step": 1284, + "total_flos": 17255618273280.0, + "train_loss": 0.27858535300162723, + "train_runtime": 1476.77, + "train_samples_per_second": 3.477, + "train_steps_per_second": 0.869 + } + ], + "logging_steps": 1, + "max_steps": 1284, + "num_input_tokens_seen": 0, + "num_train_epochs": 2, + "save_steps": 70000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 17255618273280.0, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}
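
A minimal sketch of how this state file can be consumed, assuming it is saved locally as trainer_state.json with the layout shown above (per-step entries in log_history carrying "loss"/"step", plus a final summary entry carrying "train_loss"); the filename and the plotting-free summary are illustrative choices, not part of the training run itself.

```python
import json

# Load the trainer state written by the Hugging Face Trainer.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step entries carry "loss"; the trailing summary entry carries
# aggregate fields ("train_loss", "train_runtime", ...) instead.
per_step = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]

print(f"logged steps: {len(per_step)}")                      # 1284 entries
print(f"last step / loss: {per_step[-1]}")                   # (1284, 0.2332)
print(f"reported train_loss: {summary.get('train_loss')}")   # ~0.2786
print(f"train_runtime (s): {summary.get('train_runtime')}")  # 1476.77
```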