{ "best_metric": null, "best_model_checkpoint": null, "epoch": 5.0, "eval_steps": 100, "global_step": 4375, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0, "grad_norm": 5.6899882705644975, "learning_rate": 4.5454545454545455e-06, "loss": 4.9489, "step": 1 }, { "epoch": 0.0, "grad_norm": 5.313303192834888, "learning_rate": 9.090909090909091e-06, "loss": 5.0149, "step": 2 }, { "epoch": 0.0, "grad_norm": 4.671634148022431, "learning_rate": 1.3636363636363635e-05, "loss": 4.704, "step": 3 }, { "epoch": 0.0, "grad_norm": 5.83164039994204, "learning_rate": 1.8181818181818182e-05, "loss": 5.2202, "step": 4 }, { "epoch": 0.01, "grad_norm": 4.331876854370643, "learning_rate": 2.2727272727272726e-05, "loss": 4.6825, "step": 5 }, { "epoch": 0.01, "grad_norm": 3.9782447915358237, "learning_rate": 2.727272727272727e-05, "loss": 4.4191, "step": 6 }, { "epoch": 0.01, "grad_norm": 3.8447416541366186, "learning_rate": 3.1818181818181814e-05, "loss": 3.9869, "step": 7 }, { "epoch": 0.01, "grad_norm": 3.1799015160513977, "learning_rate": 3.6363636363636364e-05, "loss": 3.7894, "step": 8 }, { "epoch": 0.01, "grad_norm": 2.3703769109341994, "learning_rate": 4.09090909090909e-05, "loss": 3.4733, "step": 9 }, { "epoch": 0.01, "grad_norm": 1.9303396464547136, "learning_rate": 4.545454545454545e-05, "loss": 3.2632, "step": 10 }, { "epoch": 0.01, "grad_norm": 2.1207409557996533, "learning_rate": 4.9999999999999996e-05, "loss": 3.1457, "step": 11 }, { "epoch": 0.01, "grad_norm": 1.9732831950233385, "learning_rate": 5.454545454545454e-05, "loss": 3.0448, "step": 12 }, { "epoch": 0.01, "grad_norm": 2.4457124204079697, "learning_rate": 5.909090909090908e-05, "loss": 2.9272, "step": 13 }, { "epoch": 0.02, "grad_norm": 2.0598932180566556, "learning_rate": 6.363636363636363e-05, "loss": 2.9642, "step": 14 }, { "epoch": 0.02, "grad_norm": 2.107103680028823, "learning_rate": 6.818181818181817e-05, "loss": 3.0298, "step": 15 }, { "epoch": 0.02, "grad_norm": 1.3767381599969715, "learning_rate": 7.272727272727273e-05, "loss": 2.7237, "step": 16 }, { "epoch": 0.02, "grad_norm": 1.117004905860884, "learning_rate": 7.727272727272726e-05, "loss": 2.9562, "step": 17 }, { "epoch": 0.02, "grad_norm": 1.105622408222587, "learning_rate": 8.18181818181818e-05, "loss": 2.8269, "step": 18 }, { "epoch": 0.02, "grad_norm": 1.082053097817895, "learning_rate": 8.636363636363636e-05, "loss": 2.7821, "step": 19 }, { "epoch": 0.02, "grad_norm": 0.9408580285208541, "learning_rate": 9.09090909090909e-05, "loss": 2.7491, "step": 20 }, { "epoch": 0.02, "grad_norm": 0.8190750865875447, "learning_rate": 9.545454545454545e-05, "loss": 2.6852, "step": 21 }, { "epoch": 0.03, "grad_norm": 0.8367821014312925, "learning_rate": 9.999999999999999e-05, "loss": 2.7615, "step": 22 }, { "epoch": 0.03, "grad_norm": 0.8405221570395635, "learning_rate": 0.00010454545454545455, "loss": 2.7083, "step": 23 }, { "epoch": 0.03, "grad_norm": 0.7218060245675572, "learning_rate": 0.00010909090909090908, "loss": 2.6305, "step": 24 }, { "epoch": 0.03, "grad_norm": 0.7530241549279189, "learning_rate": 0.00011363636363636362, "loss": 2.7175, "step": 25 }, { "epoch": 0.03, "grad_norm": 0.69826707957787, "learning_rate": 0.00011818181818181817, "loss": 2.627, "step": 26 }, { "epoch": 0.03, "grad_norm": 0.7092844786400335, "learning_rate": 0.00012272727272727272, "loss": 2.573, "step": 27 }, { "epoch": 0.03, "grad_norm": 0.7308898894607077, "learning_rate": 0.00012727272727272725, "loss": 2.5391, 
"step": 28 }, { "epoch": 0.03, "grad_norm": 0.83476706381957, "learning_rate": 0.0001318181818181818, "loss": 2.6457, "step": 29 }, { "epoch": 0.03, "grad_norm": 0.7316174354871663, "learning_rate": 0.00013636363636363634, "loss": 2.5442, "step": 30 }, { "epoch": 0.04, "grad_norm": 0.7729443438298704, "learning_rate": 0.0001409090909090909, "loss": 2.5913, "step": 31 }, { "epoch": 0.04, "grad_norm": 0.8265886407504855, "learning_rate": 0.00014545454545454546, "loss": 2.5899, "step": 32 }, { "epoch": 0.04, "grad_norm": 0.7849010418752537, "learning_rate": 0.00015, "loss": 2.3806, "step": 33 }, { "epoch": 0.04, "grad_norm": 0.768706410561798, "learning_rate": 0.00015454545454545452, "loss": 2.4802, "step": 34 }, { "epoch": 0.04, "grad_norm": 0.825827354040554, "learning_rate": 0.00015909090909090907, "loss": 2.5566, "step": 35 }, { "epoch": 0.04, "grad_norm": 0.7896766012089513, "learning_rate": 0.0001636363636363636, "loss": 2.474, "step": 36 }, { "epoch": 0.04, "grad_norm": 0.7923792808538245, "learning_rate": 0.00016818181818181816, "loss": 2.5262, "step": 37 }, { "epoch": 0.04, "grad_norm": 0.7689098715018539, "learning_rate": 0.00017272727272727272, "loss": 2.5691, "step": 38 }, { "epoch": 0.04, "grad_norm": 0.767033618278646, "learning_rate": 0.00017727272727272728, "loss": 2.6176, "step": 39 }, { "epoch": 0.05, "grad_norm": 0.7899073964461848, "learning_rate": 0.0001818181818181818, "loss": 2.4692, "step": 40 }, { "epoch": 0.05, "grad_norm": 0.7878128380115905, "learning_rate": 0.00018636363636363634, "loss": 2.4306, "step": 41 }, { "epoch": 0.05, "grad_norm": 0.8628534337236539, "learning_rate": 0.0001909090909090909, "loss": 2.5159, "step": 42 }, { "epoch": 0.05, "grad_norm": 0.9159194978075866, "learning_rate": 0.00019545454545454543, "loss": 2.4547, "step": 43 }, { "epoch": 0.05, "grad_norm": 0.7425336506751342, "learning_rate": 0.00019999999999999998, "loss": 2.5254, "step": 44 }, { "epoch": 0.05, "grad_norm": 0.7918065706770165, "learning_rate": 0.0002045454545454545, "loss": 2.5238, "step": 45 }, { "epoch": 0.05, "grad_norm": 0.93023321242069, "learning_rate": 0.0002090909090909091, "loss": 2.3934, "step": 46 }, { "epoch": 0.05, "grad_norm": 0.8399729995190708, "learning_rate": 0.00021363636363636363, "loss": 2.614, "step": 47 }, { "epoch": 0.05, "grad_norm": 0.7113366254982137, "learning_rate": 0.00021818181818181816, "loss": 2.6182, "step": 48 }, { "epoch": 0.06, "grad_norm": 0.7303674315076816, "learning_rate": 0.00022272727272727272, "loss": 2.3221, "step": 49 }, { "epoch": 0.06, "grad_norm": 0.7998959276662827, "learning_rate": 0.00022727272727272725, "loss": 2.4091, "step": 50 }, { "epoch": 0.06, "grad_norm": 0.690092499300873, "learning_rate": 0.0002318181818181818, "loss": 2.4381, "step": 51 }, { "epoch": 0.06, "grad_norm": 0.7197381669679382, "learning_rate": 0.00023636363636363633, "loss": 2.3981, "step": 52 }, { "epoch": 0.06, "grad_norm": 0.7185287871537785, "learning_rate": 0.00024090909090909086, "loss": 2.3902, "step": 53 }, { "epoch": 0.06, "grad_norm": 0.7235518752495292, "learning_rate": 0.00024545454545454545, "loss": 2.4423, "step": 54 }, { "epoch": 0.06, "grad_norm": 0.7515473737137839, "learning_rate": 0.00025, "loss": 2.6013, "step": 55 }, { "epoch": 0.06, "grad_norm": 0.7910566072750824, "learning_rate": 0.0002545454545454545, "loss": 2.5372, "step": 56 }, { "epoch": 0.07, "grad_norm": 0.8130676760453753, "learning_rate": 0.00025909090909090907, "loss": 2.5049, "step": 57 }, { "epoch": 0.07, "grad_norm": 0.662848917547693, "learning_rate": 
0.0002636363636363636, "loss": 2.2621, "step": 58 }, { "epoch": 0.07, "grad_norm": 0.6759590128864548, "learning_rate": 0.0002681818181818181, "loss": 2.2966, "step": 59 }, { "epoch": 0.07, "grad_norm": 0.7912352849707737, "learning_rate": 0.0002727272727272727, "loss": 2.4878, "step": 60 }, { "epoch": 0.07, "grad_norm": 0.7232042345262207, "learning_rate": 0.00027727272727272724, "loss": 2.3753, "step": 61 }, { "epoch": 0.07, "grad_norm": 0.8730232315874268, "learning_rate": 0.0002818181818181818, "loss": 2.6525, "step": 62 }, { "epoch": 0.07, "grad_norm": 0.7376111939334709, "learning_rate": 0.00028636363636363636, "loss": 2.4162, "step": 63 }, { "epoch": 0.07, "grad_norm": 0.7655208092138928, "learning_rate": 0.0002909090909090909, "loss": 2.3876, "step": 64 }, { "epoch": 0.07, "grad_norm": 0.8566205692203729, "learning_rate": 0.0002954545454545454, "loss": 2.5353, "step": 65 }, { "epoch": 0.08, "grad_norm": 0.6917165870024561, "learning_rate": 0.0003, "loss": 2.416, "step": 66 }, { "epoch": 0.08, "grad_norm": 0.6849847639558945, "learning_rate": 0.00030454545454545453, "loss": 2.2864, "step": 67 }, { "epoch": 0.08, "grad_norm": 0.7429670439970971, "learning_rate": 0.00030909090909090903, "loss": 2.4441, "step": 68 }, { "epoch": 0.08, "grad_norm": 0.7350657256879124, "learning_rate": 0.0003136363636363636, "loss": 2.2988, "step": 69 }, { "epoch": 0.08, "grad_norm": 0.7440090830871278, "learning_rate": 0.00031818181818181815, "loss": 2.4653, "step": 70 }, { "epoch": 0.08, "grad_norm": 0.7421953307612521, "learning_rate": 0.00032272727272727265, "loss": 2.455, "step": 71 }, { "epoch": 0.08, "grad_norm": 0.8038207785921443, "learning_rate": 0.0003272727272727272, "loss": 2.5136, "step": 72 }, { "epoch": 0.08, "grad_norm": 0.7218637232393625, "learning_rate": 0.00033181818181818177, "loss": 2.4338, "step": 73 }, { "epoch": 0.08, "grad_norm": 0.6721445419819468, "learning_rate": 0.0003363636363636363, "loss": 2.3418, "step": 74 }, { "epoch": 0.09, "grad_norm": 0.7320452696928105, "learning_rate": 0.00034090909090909094, "loss": 2.2342, "step": 75 }, { "epoch": 0.09, "grad_norm": 0.7464232447318756, "learning_rate": 0.00034545454545454544, "loss": 2.4117, "step": 76 }, { "epoch": 0.09, "grad_norm": 0.698605323531886, "learning_rate": 0.00035, "loss": 2.2924, "step": 77 }, { "epoch": 0.09, "grad_norm": 0.7436672683627857, "learning_rate": 0.00035454545454545455, "loss": 2.2388, "step": 78 }, { "epoch": 0.09, "grad_norm": 0.8574919811414031, "learning_rate": 0.00035909090909090906, "loss": 2.4663, "step": 79 }, { "epoch": 0.09, "grad_norm": 0.7245285718098012, "learning_rate": 0.0003636363636363636, "loss": 2.3387, "step": 80 }, { "epoch": 0.09, "grad_norm": 0.8435232682833903, "learning_rate": 0.00036818181818181817, "loss": 2.308, "step": 81 }, { "epoch": 0.09, "grad_norm": 0.8448947322536565, "learning_rate": 0.0003727272727272727, "loss": 2.3755, "step": 82 }, { "epoch": 0.09, "grad_norm": 0.7649424363813991, "learning_rate": 0.00037727272727272723, "loss": 2.3842, "step": 83 }, { "epoch": 0.1, "grad_norm": 0.9002529478560403, "learning_rate": 0.0003818181818181818, "loss": 2.3533, "step": 84 }, { "epoch": 0.1, "grad_norm": 0.7967338255202507, "learning_rate": 0.0003863636363636363, "loss": 2.3302, "step": 85 }, { "epoch": 0.1, "grad_norm": 0.8197712490677916, "learning_rate": 0.00039090909090909085, "loss": 2.4142, "step": 86 }, { "epoch": 0.1, "grad_norm": 0.7200667454850096, "learning_rate": 0.0003954545454545454, "loss": 2.3043, "step": 87 }, { "epoch": 0.1, "grad_norm": 
0.6850455051407842, "learning_rate": 0.00039999999999999996, "loss": 2.4058, "step": 88 }, { "epoch": 0.1, "grad_norm": 0.8566059491406931, "learning_rate": 0.00040454545454545447, "loss": 2.4595, "step": 89 }, { "epoch": 0.1, "grad_norm": 0.7659359572413186, "learning_rate": 0.000409090909090909, "loss": 2.3925, "step": 90 }, { "epoch": 0.1, "grad_norm": 0.7499339845147437, "learning_rate": 0.00041363636363636364, "loss": 2.3421, "step": 91 }, { "epoch": 0.11, "grad_norm": 0.8045934207501154, "learning_rate": 0.0004181818181818182, "loss": 2.4736, "step": 92 }, { "epoch": 0.11, "grad_norm": 0.7926827791059288, "learning_rate": 0.0004227272727272727, "loss": 2.3923, "step": 93 }, { "epoch": 0.11, "grad_norm": 0.8539750535911281, "learning_rate": 0.00042727272727272726, "loss": 2.4064, "step": 94 }, { "epoch": 0.11, "grad_norm": 0.7812006103072849, "learning_rate": 0.0004318181818181818, "loss": 2.3544, "step": 95 }, { "epoch": 0.11, "grad_norm": 0.7508701708365426, "learning_rate": 0.0004363636363636363, "loss": 2.3625, "step": 96 }, { "epoch": 0.11, "grad_norm": 0.7882493317728478, "learning_rate": 0.0004409090909090909, "loss": 2.2912, "step": 97 }, { "epoch": 0.11, "grad_norm": 0.8640221239109133, "learning_rate": 0.00044545454545454543, "loss": 2.3642, "step": 98 }, { "epoch": 0.11, "grad_norm": 0.8562783193732348, "learning_rate": 0.00045, "loss": 2.3395, "step": 99 }, { "epoch": 0.11, "grad_norm": 0.8565402876896437, "learning_rate": 0.0004545454545454545, "loss": 2.3879, "step": 100 }, { "epoch": 0.11, "eval_blimp_filtered_avg": 0.7208955223880597, "eval_blimp_filtered_std": 0.004852166696436156, "step": 100 }, { "epoch": 0.11, "eval_blimp_supplement_avg": 0.8275862068965517, "eval_blimp_supplement_std": 0.01641366169189489, "step": 100 }, { "epoch": 0.11, "eval_vqa_filtered_avg": 0.62, "eval_vqa_filtered_std": 0.048783173121456316, "step": 100 }, { "epoch": 0.11, "eval_winoground_filtered_avg": 0.65, "eval_winoground_filtered_std": 0.0479372485441102, "step": 100 }, { "epoch": 0.12, "grad_norm": 0.848722659509694, "learning_rate": 0.00045909090909090905, "loss": 2.4336, "step": 101 }, { "epoch": 0.12, "grad_norm": 0.7286363819256328, "learning_rate": 0.0004636363636363636, "loss": 2.3946, "step": 102 }, { "epoch": 0.12, "grad_norm": 0.7154719152168354, "learning_rate": 0.0004681818181818181, "loss": 2.2189, "step": 103 }, { "epoch": 0.12, "grad_norm": 0.8982880967041466, "learning_rate": 0.00047272727272727267, "loss": 2.5037, "step": 104 }, { "epoch": 0.12, "grad_norm": 0.8213734619436593, "learning_rate": 0.0004772727272727272, "loss": 2.3559, "step": 105 }, { "epoch": 0.12, "grad_norm": 0.8098967185804099, "learning_rate": 0.0004818181818181817, "loss": 2.3801, "step": 106 }, { "epoch": 0.12, "grad_norm": 0.7921099612603111, "learning_rate": 0.0004863636363636363, "loss": 2.5425, "step": 107 }, { "epoch": 0.12, "grad_norm": 0.7459522229159813, "learning_rate": 0.0004909090909090909, "loss": 2.3672, "step": 108 }, { "epoch": 0.12, "grad_norm": 0.8721611702885396, "learning_rate": 0.0004954545454545455, "loss": 2.3848, "step": 109 }, { "epoch": 0.13, "grad_norm": 0.8199871455498058, "learning_rate": 0.0005, "loss": 2.2948, "step": 110 }, { "epoch": 0.13, "grad_norm": 1.0244189637704555, "learning_rate": 0.0005045454545454546, "loss": 2.3687, "step": 111 }, { "epoch": 0.13, "grad_norm": 1.4451973525205113, "learning_rate": 0.000509090909090909, "loss": 2.4539, "step": 112 }, { "epoch": 0.13, "grad_norm": 3.3106109637222665, "learning_rate": 0.0005136363636363636, "loss": 2.4334, 
"step": 113 }, { "epoch": 0.13, "grad_norm": 0.9303969656410228, "learning_rate": 0.0005181818181818181, "loss": 2.4525, "step": 114 }, { "epoch": 0.13, "grad_norm": 1.0560909211384562, "learning_rate": 0.0005227272727272727, "loss": 2.5307, "step": 115 }, { "epoch": 0.13, "grad_norm": 8.52081284070849, "learning_rate": 0.0005272727272727272, "loss": 2.3618, "step": 116 }, { "epoch": 0.13, "grad_norm": 10.689581310240795, "learning_rate": 0.0005318181818181818, "loss": 2.6617, "step": 117 }, { "epoch": 0.13, "grad_norm": 1.4789724723429967, "learning_rate": 0.0005363636363636363, "loss": 2.3608, "step": 118 }, { "epoch": 0.14, "grad_norm": 1.1005859781641265, "learning_rate": 0.0005409090909090908, "loss": 2.4549, "step": 119 }, { "epoch": 0.14, "grad_norm": 1.0860458560243311, "learning_rate": 0.0005454545454545454, "loss": 2.4579, "step": 120 }, { "epoch": 0.14, "grad_norm": 1.274884810474263, "learning_rate": 0.0005499999999999999, "loss": 2.2827, "step": 121 }, { "epoch": 0.14, "grad_norm": 3.445996022497982, "learning_rate": 0.0005545454545454545, "loss": 2.4756, "step": 122 }, { "epoch": 0.14, "grad_norm": 7.571182718234623, "learning_rate": 0.000559090909090909, "loss": 2.5286, "step": 123 }, { "epoch": 0.14, "grad_norm": 11.756566387492118, "learning_rate": 0.0005636363636363636, "loss": 7.9232, "step": 124 }, { "epoch": 0.14, "grad_norm": 7.407187647981287, "learning_rate": 0.0005681818181818182, "loss": 10.9687, "step": 125 }, { "epoch": 0.14, "grad_norm": 15.952245908057185, "learning_rate": 0.0005727272727272727, "loss": 7.9906, "step": 126 }, { "epoch": 0.15, "grad_norm": 41.70405282560889, "learning_rate": 0.0005772727272727273, "loss": 26.8464, "step": 127 }, { "epoch": 0.15, "grad_norm": 499.71355857812347, "learning_rate": 0.0005818181818181818, "loss": 18.4571, "step": 128 }, { "epoch": 0.15, "grad_norm": 6.745040709099412, "learning_rate": 0.0005863636363636363, "loss": 7.4552, "step": 129 }, { "epoch": 0.15, "grad_norm": 24.194068208403817, "learning_rate": 0.0005909090909090908, "loss": 7.3717, "step": 130 }, { "epoch": 0.15, "grad_norm": 7.874309551242373, "learning_rate": 0.0005954545454545454, "loss": 7.0362, "step": 131 }, { "epoch": 0.15, "grad_norm": 20.521117059610425, "learning_rate": 0.0006, "loss": 6.9502, "step": 132 }, { "epoch": 0.15, "grad_norm": 13.941044053590273, "learning_rate": 0.0005999999177672297, "loss": 6.6814, "step": 133 }, { "epoch": 0.15, "grad_norm": 5.321148055548882, "learning_rate": 0.0005999996710689639, "loss": 6.4725, "step": 134 }, { "epoch": 0.15, "grad_norm": 8.868322201286633, "learning_rate": 0.000599999259905338, "loss": 6.3314, "step": 135 }, { "epoch": 0.16, "grad_norm": 7.508538090855016, "learning_rate": 0.0005999986842765774, "loss": 6.2596, "step": 136 }, { "epoch": 0.16, "grad_norm": 6.668338435607754, "learning_rate": 0.0005999979441829973, "loss": 6.1353, "step": 137 }, { "epoch": 0.16, "grad_norm": 3.5411530866911667, "learning_rate": 0.0005999970396250039, "loss": 6.2846, "step": 138 }, { "epoch": 0.16, "grad_norm": 3.450068098080256, "learning_rate": 0.0005999959706030928, "loss": 6.0639, "step": 139 }, { "epoch": 0.16, "grad_norm": 4.953357155459986, "learning_rate": 0.0005999947371178502, "loss": 6.0436, "step": 140 }, { "epoch": 0.16, "grad_norm": 5.574898327668541, "learning_rate": 0.0005999933391699523, "loss": 6.1235, "step": 141 }, { "epoch": 0.16, "grad_norm": 3.2293330805672427, "learning_rate": 0.0005999917767601654, "loss": 5.902, "step": 142 }, { "epoch": 0.16, "grad_norm": 5.864875802496051, 
"learning_rate": 0.0005999900498893461, "loss": 5.934, "step": 143 }, { "epoch": 0.16, "grad_norm": 2.1441009762203374, "learning_rate": 0.0005999881585584412, "loss": 5.6018, "step": 144 }, { "epoch": 0.17, "grad_norm": 2.4713219978500964, "learning_rate": 0.0005999861027684873, "loss": 5.8513, "step": 145 }, { "epoch": 0.17, "grad_norm": 5.011490708621217, "learning_rate": 0.0005999838825206117, "loss": 5.7966, "step": 146 }, { "epoch": 0.17, "grad_norm": 5.296766106768993, "learning_rate": 0.0005999814978160314, "loss": 5.7661, "step": 147 }, { "epoch": 0.17, "grad_norm": 3.3156093211436315, "learning_rate": 0.0005999789486560538, "loss": 5.644, "step": 148 }, { "epoch": 0.17, "grad_norm": 3.90596896447174, "learning_rate": 0.0005999762350420765, "loss": 5.6126, "step": 149 }, { "epoch": 0.17, "grad_norm": 5.2025734276036415, "learning_rate": 0.000599973356975587, "loss": 5.6422, "step": 150 }, { "epoch": 0.17, "grad_norm": 7.465258341112292, "learning_rate": 0.0005999703144581631, "loss": 5.694, "step": 151 }, { "epoch": 0.17, "grad_norm": 5.837301787659427, "learning_rate": 0.0005999671074914729, "loss": 5.8244, "step": 152 }, { "epoch": 0.17, "grad_norm": 7.565991330126649, "learning_rate": 0.0005999637360772745, "loss": 5.7922, "step": 153 }, { "epoch": 0.18, "grad_norm": 4.999233509442854, "learning_rate": 0.000599960200217416, "loss": 5.5981, "step": 154 }, { "epoch": 0.18, "grad_norm": 6.681485817945217, "learning_rate": 0.000599956499913836, "loss": 5.7056, "step": 155 }, { "epoch": 0.18, "grad_norm": 9.716396656739732, "learning_rate": 0.000599952635168563, "loss": 5.6408, "step": 156 }, { "epoch": 0.18, "grad_norm": 3.1723848744523653, "learning_rate": 0.0005999486059837158, "loss": 5.5681, "step": 157 }, { "epoch": 0.18, "grad_norm": 15.312953030922642, "learning_rate": 0.0005999444123615031, "loss": 5.6187, "step": 158 }, { "epoch": 0.18, "grad_norm": 3.820601369170831, "learning_rate": 0.000599940054304224, "loss": 5.6417, "step": 159 }, { "epoch": 0.18, "grad_norm": 9.755611579168663, "learning_rate": 0.0005999355318142679, "loss": 5.5611, "step": 160 }, { "epoch": 0.18, "grad_norm": 6.65680311577697, "learning_rate": 0.0005999308448941138, "loss": 5.7147, "step": 161 }, { "epoch": 0.19, "grad_norm": 5.732237027191075, "learning_rate": 0.0005999259935463312, "loss": 5.6456, "step": 162 }, { "epoch": 0.19, "grad_norm": 7.022805262879111, "learning_rate": 0.00059992097777358, "loss": 5.6301, "step": 163 }, { "epoch": 0.19, "grad_norm": 9.197891663547958, "learning_rate": 0.0005999157975786095, "loss": 5.3718, "step": 164 }, { "epoch": 0.19, "grad_norm": 6.768254237746249, "learning_rate": 0.0005999104529642598, "loss": 5.4128, "step": 165 }, { "epoch": 0.19, "grad_norm": 8.058083719937919, "learning_rate": 0.000599904943933461, "loss": 5.6592, "step": 166 }, { "epoch": 0.19, "grad_norm": 4.92968975403688, "learning_rate": 0.0005998992704892331, "loss": 5.543, "step": 167 }, { "epoch": 0.19, "grad_norm": 8.14093774557678, "learning_rate": 0.0005998934326346864, "loss": 5.5585, "step": 168 }, { "epoch": 0.19, "grad_norm": 3.531956565793252, "learning_rate": 0.0005998874303730214, "loss": 5.578, "step": 169 }, { "epoch": 0.19, "grad_norm": 13.482819349209533, "learning_rate": 0.0005998812637075286, "loss": 5.6677, "step": 170 }, { "epoch": 0.2, "grad_norm": 3.8332061660777974, "learning_rate": 0.0005998749326415888, "loss": 5.65, "step": 171 }, { "epoch": 0.2, "grad_norm": 11.173565402091388, "learning_rate": 0.0005998684371786726, "loss": 5.6192, "step": 172 }, { "epoch": 0.2, 
"grad_norm": 6.294773442321036, "learning_rate": 0.000599861777322341, "loss": 5.5706, "step": 173 }, { "epoch": 0.2, "grad_norm": 8.910379381554463, "learning_rate": 0.0005998549530762451, "loss": 5.577, "step": 174 }, { "epoch": 0.2, "grad_norm": 7.332440046920278, "learning_rate": 0.0005998479644441262, "loss": 5.4371, "step": 175 }, { "epoch": 0.2, "grad_norm": 2.3836159563843675, "learning_rate": 0.0005998408114298154, "loss": 5.6367, "step": 176 }, { "epoch": 0.2, "grad_norm": 6.935743283041769, "learning_rate": 0.0005998334940372341, "loss": 5.6484, "step": 177 }, { "epoch": 0.2, "grad_norm": 5.620663695889674, "learning_rate": 0.000599826012270394, "loss": 5.6219, "step": 178 }, { "epoch": 0.2, "grad_norm": 3.683170166497655, "learning_rate": 0.0005998183661333966, "loss": 5.4882, "step": 179 }, { "epoch": 0.21, "grad_norm": 7.464593588514576, "learning_rate": 0.0005998105556304337, "loss": 5.4765, "step": 180 }, { "epoch": 0.21, "grad_norm": 2.591596863133778, "learning_rate": 0.0005998025807657872, "loss": 5.6047, "step": 181 }, { "epoch": 0.21, "grad_norm": 6.205871708755827, "learning_rate": 0.000599794441543829, "loss": 5.5058, "step": 182 }, { "epoch": 0.21, "grad_norm": 4.775968599966468, "learning_rate": 0.0005997861379690212, "loss": 5.5412, "step": 183 }, { "epoch": 0.21, "grad_norm": 1.384345600715913, "learning_rate": 0.0005997776700459161, "loss": 5.5354, "step": 184 }, { "epoch": 0.21, "grad_norm": 4.202020516512133, "learning_rate": 0.0005997690377791558, "loss": 5.3729, "step": 185 }, { "epoch": 0.21, "grad_norm": 5.0705084062807435, "learning_rate": 0.0005997602411734726, "loss": 5.5032, "step": 186 }, { "epoch": 0.21, "grad_norm": 4.3294094230501985, "learning_rate": 0.0005997512802336893, "loss": 5.4449, "step": 187 }, { "epoch": 0.21, "grad_norm": 2.4692332837723323, "learning_rate": 0.0005997421549647179, "loss": 5.5083, "step": 188 }, { "epoch": 0.22, "grad_norm": 2.7661575504906524, "learning_rate": 0.0005997328653715617, "loss": 5.4265, "step": 189 }, { "epoch": 0.22, "grad_norm": 3.5537864998369266, "learning_rate": 0.000599723411459313, "loss": 5.5259, "step": 190 }, { "epoch": 0.22, "grad_norm": 4.208257372185947, "learning_rate": 0.0005997137932331547, "loss": 5.4558, "step": 191 }, { "epoch": 0.22, "grad_norm": 1.559454704818237, "learning_rate": 0.0005997040106983597, "loss": 5.5517, "step": 192 }, { "epoch": 0.22, "grad_norm": 5.069151643297439, "learning_rate": 0.000599694063860291, "loss": 5.4612, "step": 193 }, { "epoch": 0.22, "grad_norm": 2.081650485533267, "learning_rate": 0.0005996839527244015, "loss": 5.5635, "step": 194 }, { "epoch": 0.22, "grad_norm": 1.1955658961664455, "learning_rate": 0.0005996736772962345, "loss": 5.4068, "step": 195 }, { "epoch": 0.22, "grad_norm": 4.769961288928332, "learning_rate": 0.000599663237581423, "loss": 5.5319, "step": 196 }, { "epoch": 0.23, "grad_norm": 1.9670928537268961, "learning_rate": 0.0005996526335856905, "loss": 5.4161, "step": 197 }, { "epoch": 0.23, "grad_norm": 1.9091224818951296, "learning_rate": 0.00059964186531485, "loss": 5.5261, "step": 198 }, { "epoch": 0.23, "grad_norm": 2.431061434219896, "learning_rate": 0.0005996309327748052, "loss": 5.6589, "step": 199 }, { "epoch": 0.23, "grad_norm": 2.9165221110391326, "learning_rate": 0.0005996198359715492, "loss": 5.492, "step": 200 }, { "epoch": 0.23, "eval_blimp_filtered_avg": 0.5583582089552239, "eval_blimp_filtered_std": 0.005392968902149062, "step": 200 }, { "epoch": 0.23, "eval_blimp_supplement_avg": 0.5, "eval_blimp_supplement_std": 
0.022937146380016354, "step": 200 }, { "epoch": 0.23, "eval_vqa_filtered_avg": 0.15, "eval_vqa_filtered_std": 0.035887028128263734, "step": 200 }, { "epoch": 0.23, "eval_winoground_filtered_avg": 0.73, "eval_winoground_filtered_std": 0.044619604333847394, "step": 200 }, { "epoch": 0.23, "grad_norm": 2.167737556078773, "learning_rate": 0.0005996085749111656, "loss": 5.5597, "step": 201 }, { "epoch": 0.23, "grad_norm": 2.029631763384967, "learning_rate": 0.000599597149599828, "loss": 5.3916, "step": 202 }, { "epoch": 0.23, "grad_norm": 1.7214113929516655, "learning_rate": 0.0005995855600437998, "loss": 5.3554, "step": 203 }, { "epoch": 0.23, "grad_norm": 1.0998878694297851, "learning_rate": 0.0005995738062494348, "loss": 5.4597, "step": 204 }, { "epoch": 0.23, "grad_norm": 1.1019114322716383, "learning_rate": 0.0005995618882231765, "loss": 5.5064, "step": 205 }, { "epoch": 0.24, "grad_norm": 1.1189164881363165, "learning_rate": 0.0005995498059715586, "loss": 5.4009, "step": 206 }, { "epoch": 0.24, "grad_norm": 0.7410137110895191, "learning_rate": 0.0005995375595012048, "loss": 5.446, "step": 207 }, { "epoch": 0.24, "grad_norm": 0.9707199130395817, "learning_rate": 0.0005995251488188291, "loss": 5.4441, "step": 208 }, { "epoch": 0.24, "grad_norm": 1.065245420986001, "learning_rate": 0.0005995125739312349, "loss": 5.2863, "step": 209 }, { "epoch": 0.24, "grad_norm": 1.1524204624051906, "learning_rate": 0.0005994998348453161, "loss": 5.4816, "step": 210 }, { "epoch": 0.24, "grad_norm": 1.0287862797008998, "learning_rate": 0.0005994869315680566, "loss": 5.4634, "step": 211 }, { "epoch": 0.24, "grad_norm": 1.512372637512798, "learning_rate": 0.0005994738641065301, "loss": 5.564, "step": 212 }, { "epoch": 0.24, "grad_norm": 1.0995126895671135, "learning_rate": 0.0005994606324679005, "loss": 5.5937, "step": 213 }, { "epoch": 0.24, "grad_norm": 2.7358722798358723, "learning_rate": 0.0005994472366594218, "loss": 5.4026, "step": 214 }, { "epoch": 0.25, "grad_norm": 1.4878559772883229, "learning_rate": 0.0005994336766884374, "loss": 5.5863, "step": 215 }, { "epoch": 0.25, "grad_norm": 1.0225436896988405, "learning_rate": 0.0005994199525623815, "loss": 5.3952, "step": 216 }, { "epoch": 0.25, "grad_norm": 0.692949224495956, "learning_rate": 0.0005994060642887777, "loss": 5.4499, "step": 217 }, { "epoch": 0.25, "grad_norm": 0.6579508153083156, "learning_rate": 0.00059939201187524, "loss": 5.4049, "step": 218 }, { "epoch": 0.25, "grad_norm": 0.8678506895141744, "learning_rate": 0.000599377795329472, "loss": 5.4027, "step": 219 }, { "epoch": 0.25, "grad_norm": 1.4765692135287831, "learning_rate": 0.0005993634146592675, "loss": 5.318, "step": 220 }, { "epoch": 0.25, "grad_norm": 3.9305110169647164, "learning_rate": 0.0005993488698725104, "loss": 5.5305, "step": 221 }, { "epoch": 0.25, "grad_norm": 1.6243146104972896, "learning_rate": 0.0005993341609771743, "loss": 5.3441, "step": 222 }, { "epoch": 0.25, "grad_norm": 1.2365997418732557, "learning_rate": 0.0005993192879813229, "loss": 5.6214, "step": 223 }, { "epoch": 0.26, "grad_norm": 1.727127566853998, "learning_rate": 0.0005993042508931098, "loss": 5.4702, "step": 224 }, { "epoch": 0.26, "grad_norm": 1.6263868670636483, "learning_rate": 0.0005992890497207787, "loss": 5.5085, "step": 225 }, { "epoch": 0.26, "grad_norm": 1.1798141656432553, "learning_rate": 0.0005992736844726632, "loss": 5.5564, "step": 226 }, { "epoch": 0.26, "grad_norm": 1.9811010293115763, "learning_rate": 0.0005992581551571869, "loss": 5.3057, "step": 227 }, { "epoch": 0.26, "grad_norm": 
1.2110016657394074, "learning_rate": 0.0005992424617828629, "loss": 5.3765, "step": 228 }, { "epoch": 0.26, "grad_norm": 1.9457725713764498, "learning_rate": 0.0005992266043582949, "loss": 5.3678, "step": 229 }, { "epoch": 0.26, "grad_norm": 0.7598753147142062, "learning_rate": 0.0005992105828921759, "loss": 5.3839, "step": 230 }, { "epoch": 0.26, "grad_norm": 0.9859802460704546, "learning_rate": 0.0005991943973932898, "loss": 5.5403, "step": 231 }, { "epoch": 0.27, "grad_norm": 1.5094405952936216, "learning_rate": 0.000599178047870509, "loss": 5.5175, "step": 232 }, { "epoch": 0.27, "grad_norm": 3.782137157365417, "learning_rate": 0.0005991615343327973, "loss": 5.3597, "step": 233 }, { "epoch": 0.27, "grad_norm": 3.424200586465043, "learning_rate": 0.0005991448567892072, "loss": 5.4816, "step": 234 }, { "epoch": 0.27, "grad_norm": 3.8463539904688173, "learning_rate": 0.000599128015248882, "loss": 5.5235, "step": 235 }, { "epoch": 0.27, "grad_norm": 5.15458706387072, "learning_rate": 0.0005991110097210545, "loss": 5.3415, "step": 236 }, { "epoch": 0.27, "grad_norm": 2.295637360703834, "learning_rate": 0.0005990938402150473, "loss": 5.4644, "step": 237 }, { "epoch": 0.27, "grad_norm": 2.4095194713035415, "learning_rate": 0.0005990765067402729, "loss": 5.5004, "step": 238 }, { "epoch": 0.27, "grad_norm": 4.445918564956278, "learning_rate": 0.0005990590093062343, "loss": 5.6367, "step": 239 }, { "epoch": 0.27, "grad_norm": 1.8186308056122225, "learning_rate": 0.0005990413479225234, "loss": 5.4107, "step": 240 }, { "epoch": 0.28, "grad_norm": 2.0395943039589626, "learning_rate": 0.0005990235225988229, "loss": 5.6396, "step": 241 }, { "epoch": 0.28, "grad_norm": 3.8121187859046297, "learning_rate": 0.0005990055333449047, "loss": 5.4573, "step": 242 }, { "epoch": 0.28, "grad_norm": 0.947109922528538, "learning_rate": 0.0005989873801706311, "loss": 5.343, "step": 243 }, { "epoch": 0.28, "grad_norm": 4.624504859892276, "learning_rate": 0.0005989690630859538, "loss": 5.4623, "step": 244 }, { "epoch": 0.28, "grad_norm": 1.045752009774613, "learning_rate": 0.0005989505821009146, "loss": 5.2571, "step": 245 }, { "epoch": 0.28, "grad_norm": 1.5519833810335941, "learning_rate": 0.000598931937225645, "loss": 5.4051, "step": 246 }, { "epoch": 0.28, "grad_norm": 2.0673444247521937, "learning_rate": 0.0005989131284703668, "loss": 5.5218, "step": 247 }, { "epoch": 0.28, "grad_norm": 1.5078446622475545, "learning_rate": 0.000598894155845391, "loss": 5.4369, "step": 248 }, { "epoch": 0.28, "grad_norm": 3.2762063605381666, "learning_rate": 0.0005988750193611188, "loss": 5.5545, "step": 249 }, { "epoch": 0.29, "grad_norm": 1.5359646005552658, "learning_rate": 0.0005988557190280415, "loss": 5.3746, "step": 250 }, { "epoch": 0.29, "grad_norm": 3.216145337527657, "learning_rate": 0.0005988362548567394, "loss": 5.3403, "step": 251 }, { "epoch": 0.29, "grad_norm": 1.2244680077583707, "learning_rate": 0.0005988166268578834, "loss": 5.4123, "step": 252 }, { "epoch": 0.29, "grad_norm": 1.8514135451379858, "learning_rate": 0.0005987968350422339, "loss": 5.4508, "step": 253 }, { "epoch": 0.29, "grad_norm": 4.06517833291948, "learning_rate": 0.000598776879420641, "loss": 5.6311, "step": 254 }, { "epoch": 0.29, "grad_norm": 3.125027322594769, "learning_rate": 0.000598756760004045, "loss": 5.4056, "step": 255 }, { "epoch": 0.29, "grad_norm": 4.120762877602598, "learning_rate": 0.0005987364768034756, "loss": 5.2515, "step": 256 }, { "epoch": 0.29, "grad_norm": 3.0861484917793174, "learning_rate": 0.0005987160298300523, 
"loss": 5.4444, "step": 257 }, { "epoch": 0.29, "grad_norm": 3.2344190471659515, "learning_rate": 0.0005986954190949846, "loss": 5.3986, "step": 258 }, { "epoch": 0.3, "grad_norm": 1.3498810977093658, "learning_rate": 0.0005986746446095719, "loss": 5.4916, "step": 259 }, { "epoch": 0.3, "grad_norm": 2.9524087772184093, "learning_rate": 0.0005986537063852028, "loss": 5.3474, "step": 260 }, { "epoch": 0.3, "grad_norm": 1.5922022568819214, "learning_rate": 0.0005986326044333563, "loss": 5.4491, "step": 261 }, { "epoch": 0.3, "grad_norm": 3.2020390053423644, "learning_rate": 0.0005986113387656007, "loss": 5.5313, "step": 262 }, { "epoch": 0.3, "grad_norm": 2.693801444439002, "learning_rate": 0.0005985899093935943, "loss": 5.4478, "step": 263 }, { "epoch": 0.3, "grad_norm": 2.8898661885810233, "learning_rate": 0.0005985683163290851, "loss": 5.579, "step": 264 }, { "epoch": 0.3, "grad_norm": 3.5187766945426886, "learning_rate": 0.0005985465595839108, "loss": 5.3957, "step": 265 }, { "epoch": 0.3, "grad_norm": 0.9591204886163087, "learning_rate": 0.0005985246391699988, "loss": 5.3876, "step": 266 }, { "epoch": 0.31, "grad_norm": 3.4113356157199224, "learning_rate": 0.0005985025550993664, "loss": 5.3935, "step": 267 }, { "epoch": 0.31, "grad_norm": 3.6743713835932907, "learning_rate": 0.0005984803073841203, "loss": 5.4484, "step": 268 }, { "epoch": 0.31, "grad_norm": 2.7460614224183266, "learning_rate": 0.0005984578960364573, "loss": 5.3542, "step": 269 }, { "epoch": 0.31, "grad_norm": 2.2919084073262757, "learning_rate": 0.0005984353210686637, "loss": 5.4367, "step": 270 }, { "epoch": 0.31, "grad_norm": 2.3335021215126193, "learning_rate": 0.0005984125824931154, "loss": 5.3813, "step": 271 }, { "epoch": 0.31, "grad_norm": 1.2888274358816194, "learning_rate": 0.0005983896803222781, "loss": 5.2022, "step": 272 }, { "epoch": 0.31, "grad_norm": 0.8398910237675967, "learning_rate": 0.0005983666145687073, "loss": 5.2159, "step": 273 }, { "epoch": 0.31, "grad_norm": 4.76819729423109, "learning_rate": 0.000598343385245048, "loss": 5.344, "step": 274 }, { "epoch": 0.31, "grad_norm": 2.275794378535884, "learning_rate": 0.000598319992364035, "loss": 5.2194, "step": 275 }, { "epoch": 0.32, "grad_norm": 3.1574329818794626, "learning_rate": 0.0005982964359384927, "loss": 5.2684, "step": 276 }, { "epoch": 0.32, "grad_norm": 4.387885482413438, "learning_rate": 0.0005982727159813352, "loss": 5.1345, "step": 277 }, { "epoch": 0.32, "grad_norm": 15.56019981927672, "learning_rate": 0.000598248832505566, "loss": 5.6096, "step": 278 }, { "epoch": 0.32, "grad_norm": 10.360953035424545, "learning_rate": 0.0005982247855242786, "loss": 5.8703, "step": 279 }, { "epoch": 0.32, "grad_norm": 9.453270405337348, "learning_rate": 0.0005982005750506562, "loss": 6.0141, "step": 280 }, { "epoch": 0.32, "grad_norm": 6.755505970047711, "learning_rate": 0.0005981762010979712, "loss": 5.7943, "step": 281 }, { "epoch": 0.32, "grad_norm": 6.043611977119204, "learning_rate": 0.0005981516636795859, "loss": 5.7241, "step": 282 }, { "epoch": 0.32, "grad_norm": 6.285257344656154, "learning_rate": 0.0005981269628089521, "loss": 5.6993, "step": 283 }, { "epoch": 0.32, "grad_norm": 11.32621885018214, "learning_rate": 0.0005981020984996114, "loss": 5.6036, "step": 284 }, { "epoch": 0.33, "grad_norm": 14.74265893660116, "learning_rate": 0.000598077070765195, "loss": 5.5839, "step": 285 }, { "epoch": 0.33, "grad_norm": 8.291911457409011, "learning_rate": 0.0005980518796194231, "loss": 5.6241, "step": 286 }, { "epoch": 0.33, "grad_norm": 
12.990504838977, "learning_rate": 0.0005980265250761064, "loss": 5.5118, "step": 287 }, { "epoch": 0.33, "grad_norm": 5.397553973530215, "learning_rate": 0.0005980010071491445, "loss": 5.6128, "step": 288 }, { "epoch": 0.33, "grad_norm": 4.489739675349318, "learning_rate": 0.0005979753258525269, "loss": 5.4221, "step": 289 }, { "epoch": 0.33, "grad_norm": 9.087930876881718, "learning_rate": 0.0005979494812003325, "loss": 5.5251, "step": 290 }, { "epoch": 0.33, "grad_norm": 10.346962793473262, "learning_rate": 0.0005979234732067298, "loss": 5.6508, "step": 291 }, { "epoch": 0.33, "grad_norm": 1.1026848414481896, "learning_rate": 0.000597897301885977, "loss": 5.418, "step": 292 }, { "epoch": 0.33, "grad_norm": 8.594176422850023, "learning_rate": 0.0005978709672524215, "loss": 5.452, "step": 293 }, { "epoch": 0.34, "grad_norm": 9.766287935711846, "learning_rate": 0.0005978444693205006, "loss": 5.4974, "step": 294 }, { "epoch": 0.34, "grad_norm": 3.7445810793140284, "learning_rate": 0.000597817808104741, "loss": 5.4102, "step": 295 }, { "epoch": 0.34, "grad_norm": 5.072418856913142, "learning_rate": 0.0005977909836197585, "loss": 5.4505, "step": 296 }, { "epoch": 0.34, "grad_norm": 5.517669457012214, "learning_rate": 0.0005977639958802592, "loss": 5.4652, "step": 297 }, { "epoch": 0.34, "grad_norm": 4.117130805350819, "learning_rate": 0.0005977368449010381, "loss": 5.5226, "step": 298 }, { "epoch": 0.34, "grad_norm": 1.8060157053815324, "learning_rate": 0.0005977095306969799, "loss": 5.3902, "step": 299 }, { "epoch": 0.34, "grad_norm": 2.39666403418197, "learning_rate": 0.0005976820532830587, "loss": 5.3725, "step": 300 }, { "epoch": 0.34, "eval_blimp_filtered_avg": 0.5594029850746268, "eval_blimp_filtered_std": 0.005466712800431388, "step": 300 }, { "epoch": 0.34, "eval_blimp_supplement_avg": 0.5258620689655172, "eval_blimp_supplement_std": 0.02302704073531634, "step": 300 }, { "epoch": 0.34, "eval_vqa_filtered_avg": 0.14, "eval_vqa_filtered_std": 0.03487350880197772, "step": 300 }, { "epoch": 0.34, "eval_winoground_filtered_avg": 0.57, "eval_winoground_filtered_std": 0.04975698519562428, "step": 300 }, { "epoch": 0.34, "grad_norm": 3.1158990392607375, "learning_rate": 0.0005976544126743383, "loss": 5.3367, "step": 301 }, { "epoch": 0.35, "grad_norm": 2.0154449703549338, "learning_rate": 0.0005976266088859715, "loss": 5.3467, "step": 302 }, { "epoch": 0.35, "grad_norm": 2.686812167362636, "learning_rate": 0.0005975986419332011, "loss": 5.3398, "step": 303 }, { "epoch": 0.35, "grad_norm": 4.509557450743787, "learning_rate": 0.0005975705118313589, "loss": 5.4016, "step": 304 }, { "epoch": 0.35, "grad_norm": 2.4512671460471145, "learning_rate": 0.0005975422185958666, "loss": 5.3296, "step": 305 }, { "epoch": 0.35, "grad_norm": 1.9090727371809768, "learning_rate": 0.0005975137622422349, "loss": 5.2443, "step": 306 }, { "epoch": 0.35, "grad_norm": 2.8644868008469704, "learning_rate": 0.0005974851427860639, "loss": 5.1167, "step": 307 }, { "epoch": 0.35, "grad_norm": 2.0056665540238834, "learning_rate": 0.0005974563602430437, "loss": 5.1504, "step": 308 }, { "epoch": 0.35, "grad_norm": 1.5508067504920053, "learning_rate": 0.0005974274146289533, "loss": 5.1995, "step": 309 }, { "epoch": 0.35, "grad_norm": 1.9266929387011655, "learning_rate": 0.0005973983059596612, "loss": 5.3255, "step": 310 }, { "epoch": 0.36, "grad_norm": 1.0464990054002434, "learning_rate": 0.0005973690342511252, "loss": 5.217, "step": 311 }, { "epoch": 0.36, "grad_norm": 1.5086874049787238, "learning_rate": 
0.0005973395995193926, "loss": 5.2963, "step": 312 }, { "epoch": 0.36, "grad_norm": 2.236425026117851, "learning_rate": 0.0005973100017806003, "loss": 5.3029, "step": 313 }, { "epoch": 0.36, "grad_norm": 1.8345242276237188, "learning_rate": 0.000597280241050974, "loss": 5.3068, "step": 314 }, { "epoch": 0.36, "grad_norm": 3.387131216317475, "learning_rate": 0.0005972503173468295, "loss": 5.2659, "step": 315 }, { "epoch": 0.36, "grad_norm": 4.9929778154271, "learning_rate": 0.0005972202306845711, "loss": 5.3761, "step": 316 }, { "epoch": 0.36, "grad_norm": 201.58557269872338, "learning_rate": 0.0005971899810806932, "loss": 5.3516, "step": 317 }, { "epoch": 0.36, "grad_norm": 1.5348126892692233, "learning_rate": 0.0005971595685517789, "loss": 5.1978, "step": 318 }, { "epoch": 0.36, "grad_norm": 2.33872219585695, "learning_rate": 0.000597128993114501, "loss": 5.1429, "step": 319 }, { "epoch": 0.37, "grad_norm": 4.042865971234179, "learning_rate": 0.0005970982547856215, "loss": 5.2872, "step": 320 }, { "epoch": 0.37, "grad_norm": 2.8010061051085016, "learning_rate": 0.000597067353581992, "loss": 5.4718, "step": 321 }, { "epoch": 0.37, "grad_norm": 3.1546143762495875, "learning_rate": 0.0005970362895205528, "loss": 5.3273, "step": 322 }, { "epoch": 0.37, "grad_norm": 3.68635883224407, "learning_rate": 0.0005970050626183339, "loss": 5.4596, "step": 323 }, { "epoch": 0.37, "grad_norm": 2.9089188336152296, "learning_rate": 0.0005969736728924544, "loss": 5.2995, "step": 324 }, { "epoch": 0.37, "grad_norm": 4.381816442255853, "learning_rate": 0.0005969421203601228, "loss": 5.314, "step": 325 }, { "epoch": 0.37, "grad_norm": 12.997403626989605, "learning_rate": 0.0005969104050386367, "loss": 5.5858, "step": 326 }, { "epoch": 0.37, "grad_norm": 37.83285263097063, "learning_rate": 0.0005968785269453832, "loss": 5.4655, "step": 327 }, { "epoch": 0.37, "grad_norm": 2.2981382083138504, "learning_rate": 0.0005968464860978384, "loss": 5.4473, "step": 328 }, { "epoch": 0.38, "grad_norm": 1.0912350924066583, "learning_rate": 0.0005968142825135674, "loss": 5.5116, "step": 329 }, { "epoch": 0.38, "grad_norm": 3.909520922703617, "learning_rate": 0.0005967819162102252, "loss": 5.4709, "step": 330 }, { "epoch": 0.38, "grad_norm": 2.664963702894739, "learning_rate": 0.0005967493872055553, "loss": 5.5243, "step": 331 }, { "epoch": 0.38, "grad_norm": 2.4398470866271924, "learning_rate": 0.0005967166955173909, "loss": 5.3821, "step": 332 }, { "epoch": 0.38, "grad_norm": 4.397848683946831, "learning_rate": 0.0005966838411636542, "loss": 5.5714, "step": 333 }, { "epoch": 0.38, "grad_norm": 3.0323067559721197, "learning_rate": 0.0005966508241623563, "loss": 5.3786, "step": 334 }, { "epoch": 0.38, "grad_norm": 3.075943496760952, "learning_rate": 0.0005966176445315981, "loss": 5.5032, "step": 335 }, { "epoch": 0.38, "grad_norm": 3.031354517772589, "learning_rate": 0.0005965843022895691, "loss": 5.269, "step": 336 }, { "epoch": 0.39, "grad_norm": 3.868262455582384, "learning_rate": 0.000596550797454548, "loss": 5.5177, "step": 337 }, { "epoch": 0.39, "grad_norm": 3.605041610953078, "learning_rate": 0.000596517130044903, "loss": 5.3174, "step": 338 }, { "epoch": 0.39, "grad_norm": 1.8526656565631017, "learning_rate": 0.000596483300079091, "loss": 5.3418, "step": 339 }, { "epoch": 0.39, "grad_norm": 1.6520173949425077, "learning_rate": 0.0005964493075756584, "loss": 5.3879, "step": 340 }, { "epoch": 0.39, "grad_norm": 2.141926894948423, "learning_rate": 0.0005964151525532403, "loss": 5.2671, "step": 341 }, { "epoch": 0.39, 
"grad_norm": 2.404688978602847, "learning_rate": 0.0005963808350305615, "loss": 5.3187, "step": 342 }, { "epoch": 0.39, "grad_norm": 2.6927056224349126, "learning_rate": 0.000596346355026435, "loss": 5.3977, "step": 343 }, { "epoch": 0.39, "grad_norm": 2.6423084575806937, "learning_rate": 0.0005963117125597638, "loss": 5.4097, "step": 344 }, { "epoch": 0.39, "grad_norm": 1.1573791089115377, "learning_rate": 0.0005962769076495392, "loss": 5.2198, "step": 345 }, { "epoch": 0.4, "grad_norm": 2.7931075806725527, "learning_rate": 0.0005962419403148421, "loss": 5.0811, "step": 346 }, { "epoch": 0.4, "grad_norm": 2.4427168742098697, "learning_rate": 0.0005962068105748422, "loss": 5.1713, "step": 347 }, { "epoch": 0.4, "grad_norm": 1.2507327640090309, "learning_rate": 0.0005961715184487982, "loss": 5.1729, "step": 348 }, { "epoch": 0.4, "grad_norm": 1.2707946253594813, "learning_rate": 0.000596136063956058, "loss": 5.2439, "step": 349 }, { "epoch": 0.4, "grad_norm": 1.295944116363648, "learning_rate": 0.0005961004471160584, "loss": 5.1512, "step": 350 }, { "epoch": 0.4, "grad_norm": 1.6712103533318206, "learning_rate": 0.0005960646679483252, "loss": 5.1529, "step": 351 }, { "epoch": 0.4, "grad_norm": 1.4711303228066783, "learning_rate": 0.0005960287264724732, "loss": 5.122, "step": 352 }, { "epoch": 0.4, "grad_norm": 1.074882693360815, "learning_rate": 0.000595992622708206, "loss": 5.1299, "step": 353 }, { "epoch": 0.4, "grad_norm": 1.8127248924936334, "learning_rate": 0.0005959563566753166, "loss": 5.3082, "step": 354 }, { "epoch": 0.41, "grad_norm": 1.374662595270538, "learning_rate": 0.0005959199283936866, "loss": 5.0728, "step": 355 }, { "epoch": 0.41, "grad_norm": 1.8806906988133945, "learning_rate": 0.0005958833378832867, "loss": 5.197, "step": 356 }, { "epoch": 0.41, "grad_norm": 1.0376267278197344, "learning_rate": 0.0005958465851641765, "loss": 5.1509, "step": 357 }, { "epoch": 0.41, "grad_norm": 0.8749449887306634, "learning_rate": 0.0005958096702565044, "loss": 5.1594, "step": 358 }, { "epoch": 0.41, "grad_norm": 0.8816726184283791, "learning_rate": 0.0005957725931805079, "loss": 5.1314, "step": 359 }, { "epoch": 0.41, "grad_norm": 1.2629463657867646, "learning_rate": 0.0005957353539565134, "loss": 5.0803, "step": 360 }, { "epoch": 0.41, "grad_norm": 0.7763445158930895, "learning_rate": 0.0005956979526049362, "loss": 5.2906, "step": 361 }, { "epoch": 0.41, "grad_norm": 0.9197127496341845, "learning_rate": 0.0005956603891462801, "loss": 5.0621, "step": 362 }, { "epoch": 0.41, "grad_norm": 1.1624221182677006, "learning_rate": 0.0005956226636011384, "loss": 5.142, "step": 363 }, { "epoch": 0.42, "grad_norm": 0.6854299500361504, "learning_rate": 0.0005955847759901928, "loss": 5.188, "step": 364 }, { "epoch": 0.42, "grad_norm": 0.7734036187759444, "learning_rate": 0.0005955467263342141, "loss": 5.0008, "step": 365 }, { "epoch": 0.42, "grad_norm": 1.0137525842490933, "learning_rate": 0.0005955085146540616, "loss": 5.104, "step": 366 }, { "epoch": 0.42, "grad_norm": 0.9554878057956477, "learning_rate": 0.0005954701409706838, "loss": 5.1863, "step": 367 }, { "epoch": 0.42, "grad_norm": 1.0250295336851294, "learning_rate": 0.0005954316053051179, "loss": 4.9032, "step": 368 }, { "epoch": 0.42, "grad_norm": 0.7310318298965819, "learning_rate": 0.0005953929076784897, "loss": 5.1252, "step": 369 }, { "epoch": 0.42, "grad_norm": 1.1490493756860594, "learning_rate": 0.0005953540481120141, "loss": 5.2095, "step": 370 }, { "epoch": 0.42, "grad_norm": 0.8557028871037503, "learning_rate": 
0.0005953150266269947, "loss": 4.8182, "step": 371 }, { "epoch": 0.43, "grad_norm": 1.0434349376178245, "learning_rate": 0.0005952758432448236, "loss": 4.9799, "step": 372 }, { "epoch": 0.43, "grad_norm": 0.8955963805324878, "learning_rate": 0.0005952364979869819, "loss": 5.0193, "step": 373 }, { "epoch": 0.43, "grad_norm": 1.2224143509311085, "learning_rate": 0.0005951969908750397, "loss": 5.0098, "step": 374 }, { "epoch": 0.43, "grad_norm": 1.259509324789556, "learning_rate": 0.000595157321930655, "loss": 5.0709, "step": 375 }, { "epoch": 0.43, "grad_norm": 2.1567974766589004, "learning_rate": 0.0005951174911755754, "loss": 4.9933, "step": 376 }, { "epoch": 0.43, "grad_norm": 2.2152692642440783, "learning_rate": 0.0005950774986316368, "loss": 5.1693, "step": 377 }, { "epoch": 0.43, "grad_norm": 1.8144612613972055, "learning_rate": 0.0005950373443207638, "loss": 4.9564, "step": 378 }, { "epoch": 0.43, "grad_norm": 1.3709706712775995, "learning_rate": 0.0005949970282649698, "loss": 4.8974, "step": 379 }, { "epoch": 0.43, "grad_norm": 0.9006209599805071, "learning_rate": 0.0005949565504863567, "loss": 5.1648, "step": 380 }, { "epoch": 0.44, "grad_norm": 1.6336451675308545, "learning_rate": 0.0005949159110071152, "loss": 5.1214, "step": 381 }, { "epoch": 0.44, "grad_norm": 0.6777576063871181, "learning_rate": 0.0005948751098495248, "loss": 4.9723, "step": 382 }, { "epoch": 0.44, "grad_norm": 0.96408538708768, "learning_rate": 0.0005948341470359532, "loss": 5.0241, "step": 383 }, { "epoch": 0.44, "grad_norm": 1.0598865299320908, "learning_rate": 0.0005947930225888571, "loss": 4.9936, "step": 384 }, { "epoch": 0.44, "grad_norm": 1.3142792056542076, "learning_rate": 0.0005947517365307816, "loss": 5.0602, "step": 385 }, { "epoch": 0.44, "grad_norm": 0.9271718939336193, "learning_rate": 0.0005947102888843606, "loss": 4.9825, "step": 386 }, { "epoch": 0.44, "grad_norm": 1.1892966742310114, "learning_rate": 0.0005946686796723164, "loss": 5.1048, "step": 387 }, { "epoch": 0.44, "grad_norm": 0.9909909107414631, "learning_rate": 0.0005946269089174598, "loss": 4.9013, "step": 388 }, { "epoch": 0.44, "grad_norm": 0.7823938314555703, "learning_rate": 0.0005945849766426904, "loss": 4.8844, "step": 389 }, { "epoch": 0.45, "grad_norm": 1.2634648640303658, "learning_rate": 0.0005945428828709965, "loss": 5.0096, "step": 390 }, { "epoch": 0.45, "grad_norm": 1.7090472799200376, "learning_rate": 0.0005945006276254543, "loss": 5.1439, "step": 391 }, { "epoch": 0.45, "grad_norm": 0.8750037047061195, "learning_rate": 0.0005944582109292291, "loss": 5.0526, "step": 392 }, { "epoch": 0.45, "grad_norm": 1.5321473379163821, "learning_rate": 0.0005944156328055745, "loss": 4.9529, "step": 393 }, { "epoch": 0.45, "grad_norm": 1.1735041582282233, "learning_rate": 0.0005943728932778326, "loss": 4.9819, "step": 394 }, { "epoch": 0.45, "grad_norm": 1.1340206655506173, "learning_rate": 0.0005943299923694339, "loss": 5.0917, "step": 395 }, { "epoch": 0.45, "grad_norm": 0.690818830374794, "learning_rate": 0.0005942869301038977, "loss": 5.1355, "step": 396 }, { "epoch": 0.45, "grad_norm": 0.9200426326800019, "learning_rate": 0.0005942437065048314, "loss": 5.0101, "step": 397 }, { "epoch": 0.45, "grad_norm": 0.9112682533831391, "learning_rate": 0.0005942003215959309, "loss": 5.1829, "step": 398 }, { "epoch": 0.46, "grad_norm": 1.2850557209928255, "learning_rate": 0.0005941567754009807, "loss": 5.2344, "step": 399 }, { "epoch": 0.46, "grad_norm": 1.5161343025925256, "learning_rate": 0.0005941130679438535, "loss": 5.1105, "step": 400 
}, { "epoch": 0.46, "eval_blimp_filtered_avg": 0.4855223880597015, "eval_blimp_filtered_std": 0.005645894409064591, "step": 400 }, { "epoch": 0.46, "eval_blimp_supplement_avg": 0.4849137931034483, "eval_blimp_supplement_std": 0.022726419008559078, "step": 400 }, { "epoch": 0.46, "eval_vqa_filtered_avg": 0.34, "eval_vqa_filtered_std": 0.04760952285695236, "step": 400 }, { "epoch": 0.46, "eval_winoground_filtered_avg": 0.5, "eval_winoground_filtered_std": 0.050251890762960605, "step": 400 }, { "epoch": 0.46, "grad_norm": 1.5745987301659912, "learning_rate": 0.0005940691992485107, "loss": 5.0795, "step": 401 }, { "epoch": 0.46, "grad_norm": 0.8670176373799062, "learning_rate": 0.0005940251693390019, "loss": 4.9098, "step": 402 }, { "epoch": 0.46, "grad_norm": 0.8315657107530495, "learning_rate": 0.0005939809782394652, "loss": 4.9378, "step": 403 }, { "epoch": 0.46, "grad_norm": 0.9502158550245103, "learning_rate": 0.0005939366259741266, "loss": 4.8298, "step": 404 }, { "epoch": 0.46, "grad_norm": 1.6230339039493338, "learning_rate": 0.0005938921125673012, "loss": 5.1555, "step": 405 }, { "epoch": 0.46, "grad_norm": 0.8372063888595541, "learning_rate": 0.000593847438043392, "loss": 4.9752, "step": 406 }, { "epoch": 0.47, "grad_norm": 0.8947276246444711, "learning_rate": 0.0005938026024268903, "loss": 4.9903, "step": 407 }, { "epoch": 0.47, "grad_norm": 1.30872518499831, "learning_rate": 0.000593757605742376, "loss": 5.1611, "step": 408 }, { "epoch": 0.47, "grad_norm": 0.7021113364896897, "learning_rate": 0.0005937124480145169, "loss": 4.8711, "step": 409 }, { "epoch": 0.47, "grad_norm": 1.6180190134793453, "learning_rate": 0.0005936671292680693, "loss": 4.9429, "step": 410 }, { "epoch": 0.47, "grad_norm": 0.8145336385971147, "learning_rate": 0.000593621649527878, "loss": 5.0408, "step": 411 }, { "epoch": 0.47, "grad_norm": 0.9413657734536987, "learning_rate": 0.0005935760088188756, "loss": 5.0782, "step": 412 }, { "epoch": 0.47, "grad_norm": 2.396177954457218, "learning_rate": 0.0005935302071660833, "loss": 5.0698, "step": 413 }, { "epoch": 0.47, "grad_norm": 2.396649026042867, "learning_rate": 0.0005934842445946104, "loss": 4.9115, "step": 414 }, { "epoch": 0.47, "grad_norm": 1.1462949756338208, "learning_rate": 0.0005934381211296544, "loss": 5.3148, "step": 415 }, { "epoch": 0.48, "grad_norm": 2.6768886775268643, "learning_rate": 0.0005933918367965011, "loss": 4.9477, "step": 416 }, { "epoch": 0.48, "grad_norm": 1.0395456261469278, "learning_rate": 0.0005933453916205242, "loss": 4.9317, "step": 417 }, { "epoch": 0.48, "grad_norm": 1.85315568690304, "learning_rate": 0.0005932987856271862, "loss": 4.9963, "step": 418 }, { "epoch": 0.48, "grad_norm": 0.8551974729633977, "learning_rate": 0.000593252018842037, "loss": 4.7788, "step": 419 }, { "epoch": 0.48, "grad_norm": 0.8686686531784708, "learning_rate": 0.0005932050912907153, "loss": 5.084, "step": 420 }, { "epoch": 0.48, "grad_norm": 0.9163273071238545, "learning_rate": 0.0005931580029989474, "loss": 5.0167, "step": 421 }, { "epoch": 0.48, "grad_norm": 0.9877793748773773, "learning_rate": 0.0005931107539925481, "loss": 5.0818, "step": 422 }, { "epoch": 0.48, "grad_norm": 1.0505100775641891, "learning_rate": 0.0005930633442974201, "loss": 4.8086, "step": 423 }, { "epoch": 0.48, "grad_norm": 0.8179493833075077, "learning_rate": 0.0005930157739395545, "loss": 4.9411, "step": 424 }, { "epoch": 0.49, "grad_norm": 0.7244350892354696, "learning_rate": 0.0005929680429450299, "loss": 5.0448, "step": 425 }, { "epoch": 0.49, "grad_norm": 
1.0499001783554662, "learning_rate": 0.0005929201513400135, "loss": 4.8911, "step": 426 }, { "epoch": 0.49, "grad_norm": 0.8128710046308487, "learning_rate": 0.0005928720991507604, "loss": 4.7541, "step": 427 }, { "epoch": 0.49, "grad_norm": 0.9899594417674721, "learning_rate": 0.0005928238864036138, "loss": 4.9249, "step": 428 }, { "epoch": 0.49, "grad_norm": 0.8519475487475884, "learning_rate": 0.0005927755131250045, "loss": 4.9531, "step": 429 }, { "epoch": 0.49, "grad_norm": 1.464984549099713, "learning_rate": 0.0005927269793414518, "loss": 4.9396, "step": 430 }, { "epoch": 0.49, "grad_norm": 1.201953047130527, "learning_rate": 0.0005926782850795628, "loss": 4.734, "step": 431 }, { "epoch": 0.49, "grad_norm": 1.2717974382494048, "learning_rate": 0.0005926294303660327, "loss": 4.794, "step": 432 }, { "epoch": 0.49, "grad_norm": 0.993575908219525, "learning_rate": 0.0005925804152276445, "loss": 5.0263, "step": 433 }, { "epoch": 0.5, "grad_norm": 1.3683592253935484, "learning_rate": 0.000592531239691269, "loss": 4.9283, "step": 434 }, { "epoch": 0.5, "grad_norm": 2.153023199628829, "learning_rate": 0.0005924819037838655, "loss": 4.9314, "step": 435 }, { "epoch": 0.5, "grad_norm": 2.2501127620208763, "learning_rate": 0.0005924324075324805, "loss": 4.7887, "step": 436 }, { "epoch": 0.5, "grad_norm": 2.3693157557954656, "learning_rate": 0.0005923827509642489, "loss": 4.8759, "step": 437 }, { "epoch": 0.5, "grad_norm": 3.8303345750470963, "learning_rate": 0.0005923329341063935, "loss": 5.0531, "step": 438 }, { "epoch": 0.5, "grad_norm": 1.880241403582436, "learning_rate": 0.0005922829569862245, "loss": 4.7967, "step": 439 }, { "epoch": 0.5, "grad_norm": 2.4157370241388945, "learning_rate": 0.0005922328196311407, "loss": 4.9579, "step": 440 }, { "epoch": 0.5, "grad_norm": 1.4493882366755244, "learning_rate": 0.0005921825220686279, "loss": 4.9787, "step": 441 }, { "epoch": 0.51, "grad_norm": 3.8163848850813746, "learning_rate": 0.0005921320643262605, "loss": 4.9163, "step": 442 }, { "epoch": 0.51, "grad_norm": 1.6892536001994958, "learning_rate": 0.0005920814464317, "loss": 4.9446, "step": 443 }, { "epoch": 0.51, "grad_norm": 4.07121158587927, "learning_rate": 0.0005920306684126965, "loss": 4.896, "step": 444 }, { "epoch": 0.51, "grad_norm": 1.4684627849248248, "learning_rate": 0.0005919797302970871, "loss": 4.9125, "step": 445 }, { "epoch": 0.51, "grad_norm": 2.9451580614618247, "learning_rate": 0.0005919286321127971, "loss": 4.8022, "step": 446 }, { "epoch": 0.51, "grad_norm": 1.8240641219923726, "learning_rate": 0.0005918773738878396, "loss": 5.0572, "step": 447 }, { "epoch": 0.51, "grad_norm": 1.782360286052058, "learning_rate": 0.0005918259556503152, "loss": 4.9731, "step": 448 }, { "epoch": 0.51, "grad_norm": 1.290655720816401, "learning_rate": 0.0005917743774284123, "loss": 4.8638, "step": 449 }, { "epoch": 0.51, "grad_norm": 1.6603345265998382, "learning_rate": 0.0005917226392504071, "loss": 4.998, "step": 450 }, { "epoch": 0.52, "grad_norm": 1.7725356772048324, "learning_rate": 0.0005916707411446634, "loss": 4.7678, "step": 451 }, { "epoch": 0.52, "grad_norm": 0.9364237803302594, "learning_rate": 0.0005916186831396327, "loss": 4.8794, "step": 452 }, { "epoch": 0.52, "grad_norm": 1.3183289526726105, "learning_rate": 0.0005915664652638541, "loss": 4.7109, "step": 453 }, { "epoch": 0.52, "grad_norm": 2.5497725632182764, "learning_rate": 0.0005915140875459546, "loss": 5.0924, "step": 454 }, { "epoch": 0.52, "grad_norm": 2.381288240314602, "learning_rate": 0.0005914615500146483, "loss": 
5.0862, "step": 455 }, { "epoch": 0.52, "grad_norm": 1.4030847321270352, "learning_rate": 0.0005914088526987375, "loss": 4.919, "step": 456 }, { "epoch": 0.52, "grad_norm": 1.2577837464333326, "learning_rate": 0.0005913559956271119, "loss": 5.0596, "step": 457 }, { "epoch": 0.52, "grad_norm": 2.1393888737298927, "learning_rate": 0.0005913029788287486, "loss": 4.6101, "step": 458 }, { "epoch": 0.52, "grad_norm": 1.7531951071642504, "learning_rate": 0.0005912498023327123, "loss": 4.6786, "step": 459 }, { "epoch": 0.53, "grad_norm": 1.2634790648468592, "learning_rate": 0.0005911964661681554, "loss": 4.8421, "step": 460 }, { "epoch": 0.53, "grad_norm": 1.4674237189664705, "learning_rate": 0.0005911429703643179, "loss": 4.8122, "step": 461 }, { "epoch": 0.53, "grad_norm": 2.624924510258025, "learning_rate": 0.000591089314950527, "loss": 5.0403, "step": 462 }, { "epoch": 0.53, "grad_norm": 1.414204484535188, "learning_rate": 0.0005910354999561977, "loss": 4.7091, "step": 463 }, { "epoch": 0.53, "grad_norm": 2.443404140277432, "learning_rate": 0.0005909815254108323, "loss": 4.8525, "step": 464 }, { "epoch": 0.53, "grad_norm": 0.8264800002970221, "learning_rate": 0.0005909273913440208, "loss": 4.8763, "step": 465 }, { "epoch": 0.53, "grad_norm": 1.7602043521271793, "learning_rate": 0.0005908730977854403, "loss": 4.7583, "step": 466 }, { "epoch": 0.53, "grad_norm": 1.3402586010335686, "learning_rate": 0.0005908186447648557, "loss": 4.8228, "step": 467 }, { "epoch": 0.53, "grad_norm": 1.7136624019422253, "learning_rate": 0.000590764032312119, "loss": 4.9283, "step": 468 }, { "epoch": 0.54, "grad_norm": 0.978101534272162, "learning_rate": 0.00059070926045717, "loss": 4.8035, "step": 469 }, { "epoch": 0.54, "grad_norm": 1.9530177933535502, "learning_rate": 0.0005906543292300352, "loss": 4.7629, "step": 470 }, { "epoch": 0.54, "grad_norm": 1.1346642108929814, "learning_rate": 0.0005905992386608293, "loss": 4.7692, "step": 471 }, { "epoch": 0.54, "grad_norm": 1.0308167631358847, "learning_rate": 0.0005905439887797538, "loss": 4.7912, "step": 472 }, { "epoch": 0.54, "grad_norm": 1.9875049162022476, "learning_rate": 0.0005904885796170979, "loss": 4.8697, "step": 473 }, { "epoch": 0.54, "grad_norm": 1.3418470115492953, "learning_rate": 0.0005904330112032377, "loss": 4.9477, "step": 474 }, { "epoch": 0.54, "grad_norm": 1.0480622368131916, "learning_rate": 0.0005903772835686369, "loss": 4.593, "step": 475 }, { "epoch": 0.54, "grad_norm": 1.354536300576432, "learning_rate": 0.0005903213967438464, "loss": 4.7671, "step": 476 }, { "epoch": 0.55, "grad_norm": 1.1517031804785895, "learning_rate": 0.0005902653507595044, "loss": 4.7934, "step": 477 }, { "epoch": 0.55, "grad_norm": 1.357036532682106, "learning_rate": 0.0005902091456463365, "loss": 4.7012, "step": 478 }, { "epoch": 0.55, "grad_norm": 1.6412632756399552, "learning_rate": 0.0005901527814351551, "loss": 4.6334, "step": 479 }, { "epoch": 0.55, "grad_norm": 1.008841635849965, "learning_rate": 0.0005900962581568603, "loss": 4.8036, "step": 480 }, { "epoch": 0.55, "grad_norm": 1.1715832602810776, "learning_rate": 0.0005900395758424392, "loss": 4.8931, "step": 481 }, { "epoch": 0.55, "grad_norm": 1.1389431882251422, "learning_rate": 0.000589982734522966, "loss": 4.9751, "step": 482 }, { "epoch": 0.55, "grad_norm": 2.529634724119913, "learning_rate": 0.0005899257342296021, "loss": 4.7829, "step": 483 }, { "epoch": 0.55, "grad_norm": 3.7113162580036616, "learning_rate": 0.0005898685749935964, "loss": 4.7818, "step": 484 }, { "epoch": 0.55, "grad_norm": 
2.14422545664322, "learning_rate": 0.0005898112568462843, "loss": 4.8621, "step": 485 }, { "epoch": 0.56, "grad_norm": 2.3386824191583413, "learning_rate": 0.0005897537798190889, "loss": 4.8773, "step": 486 }, { "epoch": 0.56, "grad_norm": 2.2393840500097504, "learning_rate": 0.0005896961439435202, "loss": 4.9035, "step": 487 }, { "epoch": 0.56, "grad_norm": 2.5573095920756312, "learning_rate": 0.000589638349251175, "loss": 4.691, "step": 488 }, { "epoch": 0.56, "grad_norm": 2.9106696288016267, "learning_rate": 0.0005895803957737376, "loss": 4.7256, "step": 489 }, { "epoch": 0.56, "grad_norm": 2.0565024358512694, "learning_rate": 0.0005895222835429791, "loss": 4.8294, "step": 490 }, { "epoch": 0.56, "grad_norm": 2.4590463691943576, "learning_rate": 0.0005894640125907579, "loss": 4.82, "step": 491 }, { "epoch": 0.56, "grad_norm": 1.3139952698871484, "learning_rate": 0.0005894055829490189, "loss": 4.6732, "step": 492 }, { "epoch": 0.56, "grad_norm": 1.4495280791730436, "learning_rate": 0.0005893469946497945, "loss": 4.7964, "step": 493 }, { "epoch": 0.56, "grad_norm": 1.5454952970252716, "learning_rate": 0.0005892882477252038, "loss": 4.841, "step": 494 }, { "epoch": 0.57, "grad_norm": 1.130661382286492, "learning_rate": 0.0005892293422074531, "loss": 4.6413, "step": 495 }, { "epoch": 0.57, "grad_norm": 1.2142863255105913, "learning_rate": 0.0005891702781288353, "loss": 4.7086, "step": 496 }, { "epoch": 0.57, "grad_norm": 0.949933694591531, "learning_rate": 0.0005891110555217306, "loss": 4.7153, "step": 497 }, { "epoch": 0.57, "grad_norm": 0.8936503977226508, "learning_rate": 0.0005890516744186058, "loss": 4.6979, "step": 498 }, { "epoch": 0.57, "grad_norm": 0.7925834791328705, "learning_rate": 0.0005889921348520146, "loss": 4.5879, "step": 499 }, { "epoch": 0.57, "grad_norm": 1.008509942513787, "learning_rate": 0.000588932436854598, "loss": 4.6122, "step": 500 }, { "epoch": 0.57, "eval_blimp_filtered_avg": 0.5132835820895523, "eval_blimp_filtered_std": 0.0053635566345865944, "step": 500 }, { "epoch": 0.57, "eval_blimp_supplement_avg": 0.47413793103448276, "eval_blimp_supplement_std": 0.023246279862775738, "step": 500 }, { "epoch": 0.57, "eval_vqa_filtered_avg": 0.17, "eval_vqa_filtered_std": 0.03775251680686371, "step": 500 }, { "epoch": 0.57, "eval_winoground_filtered_avg": 0.55, "eval_winoground_filtered_std": 0.04999999999999999, "step": 500 }, { "epoch": 0.57, "grad_norm": 0.8171008064621451, "learning_rate": 0.0005888725804590833, "loss": 4.7365, "step": 501 }, { "epoch": 0.57, "grad_norm": 1.838535759957165, "learning_rate": 0.000588812565698285, "loss": 5.02, "step": 502 }, { "epoch": 0.57, "grad_norm": 2.1205922192554914, "learning_rate": 0.0005887523926051042, "loss": 4.654, "step": 503 }, { "epoch": 0.58, "grad_norm": 1.1596528479611834, "learning_rate": 0.000588692061212529, "loss": 4.7663, "step": 504 }, { "epoch": 0.58, "grad_norm": 1.1993983992227317, "learning_rate": 0.0005886315715536341, "loss": 4.8463, "step": 505 }, { "epoch": 0.58, "grad_norm": 1.9718479619370148, "learning_rate": 0.0005885709236615812, "loss": 4.8378, "step": 506 }, { "epoch": 0.58, "grad_norm": 0.6810953901416547, "learning_rate": 0.0005885101175696185, "loss": 4.608, "step": 507 }, { "epoch": 0.58, "grad_norm": 1.9173504377743096, "learning_rate": 0.000588449153311081, "loss": 4.7006, "step": 508 }, { "epoch": 0.58, "grad_norm": 1.9626426105656214, "learning_rate": 0.0005883880309193903, "loss": 4.6051, "step": 509 }, { "epoch": 0.58, "grad_norm": 0.9126052009767455, "learning_rate": 
0.0005883267504280551, "loss": 4.7994, "step": 510 }, { "epoch": 0.58, "grad_norm": 2.563384748450241, "learning_rate": 0.0005882653118706704, "loss": 4.7805, "step": 511 }, { "epoch": 0.59, "grad_norm": 1.7646173077297718, "learning_rate": 0.0005882037152809179, "loss": 4.7342, "step": 512 }, { "epoch": 0.59, "grad_norm": 1.315376183994279, "learning_rate": 0.000588141960692566, "loss": 4.7335, "step": 513 }, { "epoch": 0.59, "grad_norm": 3.017408881181837, "learning_rate": 0.0005880800481394696, "loss": 4.8928, "step": 514 }, { "epoch": 0.59, "grad_norm": 1.5355368051852243, "learning_rate": 0.0005880179776555706, "loss": 4.5725, "step": 515 }, { "epoch": 0.59, "grad_norm": 1.6167280132290474, "learning_rate": 0.0005879557492748968, "loss": 4.5813, "step": 516 }, { "epoch": 0.59, "grad_norm": 1.57654718341341, "learning_rate": 0.0005878933630315633, "loss": 4.696, "step": 517 }, { "epoch": 0.59, "grad_norm": 1.3657389484154014, "learning_rate": 0.0005878308189597712, "loss": 4.7612, "step": 518 }, { "epoch": 0.59, "grad_norm": 3.051504319769276, "learning_rate": 0.0005877681170938084, "loss": 4.7385, "step": 519 }, { "epoch": 0.59, "grad_norm": 1.7348003082550534, "learning_rate": 0.000587705257468049, "loss": 4.6694, "step": 520 }, { "epoch": 0.6, "grad_norm": 3.5503873834728132, "learning_rate": 0.0005876422401169542, "loss": 4.7072, "step": 521 }, { "epoch": 0.6, "grad_norm": 3.2854321738868144, "learning_rate": 0.0005875790650750711, "loss": 4.7978, "step": 522 }, { "epoch": 0.6, "grad_norm": 2.06854332718905, "learning_rate": 0.0005875157323770332, "loss": 4.6417, "step": 523 }, { "epoch": 0.6, "grad_norm": 2.8834146160449166, "learning_rate": 0.0005874522420575609, "loss": 4.6135, "step": 524 }, { "epoch": 0.6, "grad_norm": 1.8619124332877415, "learning_rate": 0.0005873885941514608, "loss": 4.8998, "step": 525 }, { "epoch": 0.6, "grad_norm": 2.139084953681298, "learning_rate": 0.0005873247886936257, "loss": 4.6173, "step": 526 }, { "epoch": 0.6, "grad_norm": 2.3682004498947644, "learning_rate": 0.0005872608257190349, "loss": 4.8124, "step": 527 }, { "epoch": 0.6, "grad_norm": 2.1919305740960056, "learning_rate": 0.0005871967052627542, "loss": 4.9145, "step": 528 }, { "epoch": 0.6, "grad_norm": 1.8074083573089332, "learning_rate": 0.0005871324273599357, "loss": 4.6684, "step": 529 }, { "epoch": 0.61, "grad_norm": 1.760381800230039, "learning_rate": 0.0005870679920458176, "loss": 4.5479, "step": 530 }, { "epoch": 0.61, "grad_norm": 2.7358811547781055, "learning_rate": 0.0005870033993557246, "loss": 4.4928, "step": 531 }, { "epoch": 0.61, "grad_norm": 2.7177674714992777, "learning_rate": 0.0005869386493250674, "loss": 4.5819, "step": 532 }, { "epoch": 0.61, "grad_norm": 1.9799414964901767, "learning_rate": 0.0005868737419893435, "loss": 4.6163, "step": 533 }, { "epoch": 0.61, "grad_norm": 1.7802944079199146, "learning_rate": 0.0005868086773841361, "loss": 4.6801, "step": 534 }, { "epoch": 0.61, "grad_norm": 0.9483623847876221, "learning_rate": 0.0005867434555451148, "loss": 4.4441, "step": 535 }, { "epoch": 0.61, "grad_norm": 1.1427873743960972, "learning_rate": 0.0005866780765080355, "loss": 4.5677, "step": 536 }, { "epoch": 0.61, "grad_norm": 1.9405423787182783, "learning_rate": 0.00058661254030874, "loss": 4.6599, "step": 537 }, { "epoch": 0.61, "grad_norm": 1.2256930935071564, "learning_rate": 0.0005865468469831569, "loss": 4.6746, "step": 538 }, { "epoch": 0.62, "grad_norm": 0.909741318211389, "learning_rate": 0.0005864809965672999, "loss": 4.4962, "step": 539 }, { "epoch": 0.62, 
"grad_norm": 1.5112603120659025, "learning_rate": 0.00058641498909727, "loss": 4.5661, "step": 540 }, { "epoch": 0.62, "grad_norm": 0.969409632098589, "learning_rate": 0.0005863488246092533, "loss": 4.4646, "step": 541 }, { "epoch": 0.62, "grad_norm": 1.770211682865135, "learning_rate": 0.0005862825031395225, "loss": 4.5369, "step": 542 }, { "epoch": 0.62, "grad_norm": 1.6040077016601832, "learning_rate": 0.0005862160247244363, "loss": 4.6617, "step": 543 }, { "epoch": 0.62, "grad_norm": 1.8940227446212239, "learning_rate": 0.0005861493894004394, "loss": 4.6954, "step": 544 }, { "epoch": 0.62, "grad_norm": 1.2910386809712782, "learning_rate": 0.0005860825972040626, "loss": 4.5581, "step": 545 }, { "epoch": 0.62, "grad_norm": 1.2131410991141303, "learning_rate": 0.0005860156481719224, "loss": 4.5131, "step": 546 }, { "epoch": 0.63, "grad_norm": 1.120317683234146, "learning_rate": 0.0005859485423407216, "loss": 4.6343, "step": 547 }, { "epoch": 0.63, "grad_norm": 1.310473575372834, "learning_rate": 0.0005858812797472489, "loss": 4.6142, "step": 548 }, { "epoch": 0.63, "grad_norm": 1.273331360638896, "learning_rate": 0.0005858138604283789, "loss": 4.5779, "step": 549 }, { "epoch": 0.63, "grad_norm": 0.9570707889363734, "learning_rate": 0.000585746284421072, "loss": 4.5162, "step": 550 }, { "epoch": 0.63, "grad_norm": 1.392164932188141, "learning_rate": 0.0005856785517623747, "loss": 4.6816, "step": 551 }, { "epoch": 0.63, "grad_norm": 1.2412222757338822, "learning_rate": 0.0005856106624894193, "loss": 4.7816, "step": 552 }, { "epoch": 0.63, "grad_norm": 1.8183044724648039, "learning_rate": 0.0005855426166394239, "loss": 4.2305, "step": 553 }, { "epoch": 0.63, "grad_norm": 2.0650767565610204, "learning_rate": 0.0005854744142496926, "loss": 4.5111, "step": 554 }, { "epoch": 0.63, "grad_norm": 1.7537259644216572, "learning_rate": 0.000585406055357615, "loss": 4.6985, "step": 555 }, { "epoch": 0.64, "grad_norm": 1.4266868765331735, "learning_rate": 0.000585337540000667, "loss": 4.751, "step": 556 }, { "epoch": 0.64, "grad_norm": 1.508195153365269, "learning_rate": 0.0005852688682164098, "loss": 4.3793, "step": 557 }, { "epoch": 0.64, "grad_norm": 2.5526958022670714, "learning_rate": 0.0005852000400424905, "loss": 4.8661, "step": 558 }, { "epoch": 0.64, "grad_norm": 1.2189504667402664, "learning_rate": 0.0005851310555166422, "loss": 4.9176, "step": 559 }, { "epoch": 0.64, "grad_norm": 2.539330463467029, "learning_rate": 0.0005850619146766832, "loss": 4.8204, "step": 560 }, { "epoch": 0.64, "grad_norm": 1.3164693020536904, "learning_rate": 0.0005849926175605179, "loss": 4.6477, "step": 561 }, { "epoch": 0.64, "grad_norm": 1.5578744816238042, "learning_rate": 0.0005849231642061363, "loss": 4.4022, "step": 562 }, { "epoch": 0.64, "grad_norm": 1.6624762082883593, "learning_rate": 0.0005848535546516141, "loss": 4.5103, "step": 563 }, { "epoch": 0.64, "grad_norm": 1.2319069138790335, "learning_rate": 0.0005847837889351124, "loss": 4.3334, "step": 564 }, { "epoch": 0.65, "grad_norm": 1.6544071013957493, "learning_rate": 0.000584713867094878, "loss": 4.5528, "step": 565 }, { "epoch": 0.65, "grad_norm": 1.078503282807618, "learning_rate": 0.0005846437891692434, "loss": 4.6397, "step": 566 }, { "epoch": 0.65, "grad_norm": 2.5058048319289474, "learning_rate": 0.0005845735551966269, "loss": 4.4403, "step": 567 }, { "epoch": 0.65, "grad_norm": 3.134817404312362, "learning_rate": 0.0005845031652155316, "loss": 4.55, "step": 568 }, { "epoch": 0.65, "grad_norm": 1.0311693850083417, "learning_rate": 
0.0005844326192645468, "loss": 4.6061, "step": 569 }, { "epoch": 0.65, "grad_norm": 1.6730180836450599, "learning_rate": 0.0005843619173823471, "loss": 4.6309, "step": 570 }, { "epoch": 0.65, "grad_norm": 1.7976482537109384, "learning_rate": 0.0005842910596076926, "loss": 4.4306, "step": 571 }, { "epoch": 0.65, "grad_norm": 1.2926770517049064, "learning_rate": 0.0005842200459794288, "loss": 4.247, "step": 572 }, { "epoch": 0.65, "grad_norm": 0.8722058193531201, "learning_rate": 0.0005841488765364868, "loss": 4.4775, "step": 573 }, { "epoch": 0.66, "grad_norm": 1.579031044168635, "learning_rate": 0.0005840775513178827, "loss": 4.5176, "step": 574 }, { "epoch": 0.66, "grad_norm": 1.8003774524978253, "learning_rate": 0.0005840060703627186, "loss": 4.5182, "step": 575 }, { "epoch": 0.66, "grad_norm": 1.1941118664404586, "learning_rate": 0.0005839344337101816, "loss": 4.3376, "step": 576 }, { "epoch": 0.66, "grad_norm": 1.3887008061646828, "learning_rate": 0.0005838626413995441, "loss": 4.4326, "step": 577 }, { "epoch": 0.66, "grad_norm": 3.3139035284049734, "learning_rate": 0.0005837906934701642, "loss": 4.3924, "step": 578 }, { "epoch": 0.66, "grad_norm": 3.3568370387124467, "learning_rate": 0.0005837185899614848, "loss": 4.3549, "step": 579 }, { "epoch": 0.66, "grad_norm": 2.2993200214867993, "learning_rate": 0.0005836463309130348, "loss": 4.6231, "step": 580 }, { "epoch": 0.66, "grad_norm": 1.6477160313473223, "learning_rate": 0.0005835739163644275, "loss": 4.6646, "step": 581 }, { "epoch": 0.67, "grad_norm": 2.2185135423151476, "learning_rate": 0.0005835013463553622, "loss": 4.459, "step": 582 }, { "epoch": 0.67, "grad_norm": 1.4970352213018463, "learning_rate": 0.0005834286209256229, "loss": 4.5212, "step": 583 }, { "epoch": 0.67, "grad_norm": 1.3720074984260233, "learning_rate": 0.0005833557401150791, "loss": 4.6575, "step": 584 }, { "epoch": 0.67, "grad_norm": 1.429644051071723, "learning_rate": 0.0005832827039636855, "loss": 4.5239, "step": 585 }, { "epoch": 0.67, "grad_norm": 1.8866397930135517, "learning_rate": 0.0005832095125114818, "loss": 4.6404, "step": 586 }, { "epoch": 0.67, "grad_norm": 1.217479478831131, "learning_rate": 0.0005831361657985928, "loss": 4.5873, "step": 587 }, { "epoch": 0.67, "grad_norm": 1.3626623129430853, "learning_rate": 0.0005830626638652287, "loss": 4.3849, "step": 588 }, { "epoch": 0.67, "grad_norm": 3.123611645060079, "learning_rate": 0.0005829890067516845, "loss": 4.5972, "step": 589 }, { "epoch": 0.67, "grad_norm": 4.515740987022606, "learning_rate": 0.0005829151944983405, "loss": 4.5847, "step": 590 }, { "epoch": 0.68, "grad_norm": 1.373894020911828, "learning_rate": 0.0005828412271456618, "loss": 4.4942, "step": 591 }, { "epoch": 0.68, "grad_norm": 4.6168771308421395, "learning_rate": 0.0005827671047341988, "loss": 4.6232, "step": 592 }, { "epoch": 0.68, "grad_norm": 2.4466098061821766, "learning_rate": 0.0005826928273045866, "loss": 4.734, "step": 593 }, { "epoch": 0.68, "grad_norm": 2.5297440453794775, "learning_rate": 0.0005826183948975457, "loss": 4.6071, "step": 594 }, { "epoch": 0.68, "grad_norm": 3.6786734055206285, "learning_rate": 0.0005825438075538812, "loss": 4.5363, "step": 595 }, { "epoch": 0.68, "grad_norm": 1.378062849949167, "learning_rate": 0.0005824690653144833, "loss": 4.464, "step": 596 }, { "epoch": 0.68, "grad_norm": 1.5445799094739672, "learning_rate": 0.0005823941682203268, "loss": 4.3626, "step": 597 }, { "epoch": 0.68, "grad_norm": 1.9201647194414257, "learning_rate": 0.0005823191163124721, "loss": 4.4447, "step": 598 }, { 
"epoch": 0.68, "grad_norm": 1.4445228799635261, "learning_rate": 0.0005822439096320638, "loss": 4.6541, "step": 599 }, { "epoch": 0.69, "grad_norm": 1.599566862053309, "learning_rate": 0.0005821685482203315, "loss": 4.4155, "step": 600 }, { "epoch": 0.69, "eval_blimp_filtered_avg": 0.5270149253731343, "eval_blimp_filtered_std": 0.005412543717709778, "step": 600 }, { "epoch": 0.69, "eval_blimp_supplement_avg": 0.5452586206896551, "eval_blimp_supplement_std": 0.023051305575903118, "step": 600 }, { "epoch": 0.69, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 600 }, { "epoch": 0.69, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 600 }, { "epoch": 0.69, "grad_norm": 1.153028431537132, "learning_rate": 0.00058209303211859, "loss": 4.5192, "step": 601 }, { "epoch": 0.69, "grad_norm": 1.4375604090612792, "learning_rate": 0.0005820173613682385, "loss": 4.5397, "step": 602 }, { "epoch": 0.69, "grad_norm": 1.18253603496043, "learning_rate": 0.0005819415360107609, "loss": 4.5341, "step": 603 }, { "epoch": 0.69, "grad_norm": 1.5037700705204315, "learning_rate": 0.0005818655560877264, "loss": 4.5133, "step": 604 }, { "epoch": 0.69, "grad_norm": 1.3452045210709744, "learning_rate": 0.0005817894216407885, "loss": 4.4875, "step": 605 }, { "epoch": 0.69, "grad_norm": 0.8609470942745091, "learning_rate": 0.0005817131327116853, "loss": 4.6046, "step": 606 }, { "epoch": 0.69, "grad_norm": 1.520603707500873, "learning_rate": 0.0005816366893422399, "loss": 4.3918, "step": 607 }, { "epoch": 0.69, "grad_norm": 2.707395452289135, "learning_rate": 0.00058156009157436, "loss": 4.6315, "step": 608 }, { "epoch": 0.7, "grad_norm": 3.053411393029927, "learning_rate": 0.000581483339450038, "loss": 4.7913, "step": 609 }, { "epoch": 0.7, "grad_norm": 1.5492946681888873, "learning_rate": 0.0005814064330113509, "loss": 4.4569, "step": 610 }, { "epoch": 0.7, "grad_norm": 1.9012913476510545, "learning_rate": 0.0005813293723004598, "loss": 4.6044, "step": 611 }, { "epoch": 0.7, "grad_norm": 2.106278887515256, "learning_rate": 0.0005812521573596112, "loss": 4.4048, "step": 612 }, { "epoch": 0.7, "grad_norm": 3.2272607130511113, "learning_rate": 0.0005811747882311357, "loss": 4.4922, "step": 613 }, { "epoch": 0.7, "grad_norm": 3.1886127550625463, "learning_rate": 0.0005810972649574484, "loss": 4.5624, "step": 614 }, { "epoch": 0.7, "grad_norm": 2.8345762063578746, "learning_rate": 0.000581019587581049, "loss": 4.7792, "step": 615 }, { "epoch": 0.7, "grad_norm": 3.7306034009254923, "learning_rate": 0.0005809417561445217, "loss": 4.6113, "step": 616 }, { "epoch": 0.71, "grad_norm": 2.5817646888539425, "learning_rate": 0.0005808637706905351, "loss": 4.5571, "step": 617 }, { "epoch": 0.71, "grad_norm": 4.9360810794666365, "learning_rate": 0.0005807856312618423, "loss": 4.688, "step": 618 }, { "epoch": 0.71, "grad_norm": 3.6471596128698645, "learning_rate": 0.0005807073379012808, "loss": 4.6999, "step": 619 }, { "epoch": 0.71, "grad_norm": 4.382956047893064, "learning_rate": 0.0005806288906517724, "loss": 4.5704, "step": 620 }, { "epoch": 0.71, "grad_norm": 2.916998723576681, "learning_rate": 0.0005805502895563234, "loss": 4.6399, "step": 621 }, { "epoch": 0.71, "grad_norm": 2.829786587257529, "learning_rate": 0.0005804715346580243, "loss": 4.6934, "step": 622 }, { "epoch": 0.71, "grad_norm": 1.8514127585254476, "learning_rate": 0.00058039262600005, "loss": 4.5135, "step": 623 }, { "epoch": 0.71, "grad_norm": 1.9313034622764944, "learning_rate": 
0.0005803135636256599, "loss": 4.7101, "step": 624 }, { "epoch": 0.71, "grad_norm": 1.8799230152237083, "learning_rate": 0.000580234347578197, "loss": 4.4219, "step": 625 }, { "epoch": 0.72, "grad_norm": 2.2563815904747493, "learning_rate": 0.0005801549779010894, "loss": 4.6391, "step": 626 }, { "epoch": 0.72, "grad_norm": 2.2995755511664884, "learning_rate": 0.0005800754546378488, "loss": 4.496, "step": 627 }, { "epoch": 0.72, "grad_norm": 2.3741097541216885, "learning_rate": 0.0005799957778320716, "loss": 4.3619, "step": 628 }, { "epoch": 0.72, "grad_norm": 2.970973439588761, "learning_rate": 0.0005799159475274377, "loss": 4.6854, "step": 629 }, { "epoch": 0.72, "grad_norm": 1.783998069759036, "learning_rate": 0.0005798359637677118, "loss": 4.6968, "step": 630 }, { "epoch": 0.72, "grad_norm": 3.2706158269709262, "learning_rate": 0.0005797558265967425, "loss": 4.3752, "step": 631 }, { "epoch": 0.72, "grad_norm": 1.543258477383476, "learning_rate": 0.0005796755360584624, "loss": 4.5127, "step": 632 }, { "epoch": 0.72, "grad_norm": 2.2735701888155337, "learning_rate": 0.0005795950921968882, "loss": 4.3989, "step": 633 }, { "epoch": 0.72, "grad_norm": 1.1896237403404126, "learning_rate": 0.0005795144950561207, "loss": 4.4731, "step": 634 }, { "epoch": 0.73, "grad_norm": 2.894490051112878, "learning_rate": 0.0005794337446803449, "loss": 4.4438, "step": 635 }, { "epoch": 0.73, "grad_norm": 1.6220094387318125, "learning_rate": 0.0005793528411138295, "loss": 4.3676, "step": 636 }, { "epoch": 0.73, "grad_norm": 2.9351225765386078, "learning_rate": 0.0005792717844009275, "loss": 4.6176, "step": 637 }, { "epoch": 0.73, "grad_norm": 0.8508859886264898, "learning_rate": 0.0005791905745860756, "loss": 4.504, "step": 638 }, { "epoch": 0.73, "grad_norm": 2.319166104294842, "learning_rate": 0.0005791092117137944, "loss": 4.5067, "step": 639 }, { "epoch": 0.73, "grad_norm": 1.7160917131649451, "learning_rate": 0.0005790276958286886, "loss": 4.5715, "step": 640 }, { "epoch": 0.73, "grad_norm": 2.7815471157488063, "learning_rate": 0.0005789460269754467, "loss": 4.4707, "step": 641 }, { "epoch": 0.73, "grad_norm": 1.8560041999946482, "learning_rate": 0.0005788642051988413, "loss": 4.3444, "step": 642 }, { "epoch": 0.73, "grad_norm": 3.6850493635169523, "learning_rate": 0.0005787822305437283, "loss": 4.6303, "step": 643 }, { "epoch": 0.74, "grad_norm": 1.2599905854419227, "learning_rate": 0.0005787001030550478, "loss": 4.6004, "step": 644 }, { "epoch": 0.74, "grad_norm": 2.1079452933649114, "learning_rate": 0.0005786178227778236, "loss": 4.4919, "step": 645 }, { "epoch": 0.74, "grad_norm": 1.2731969815583444, "learning_rate": 0.0005785353897571635, "loss": 4.562, "step": 646 }, { "epoch": 0.74, "grad_norm": 0.9856625670721976, "learning_rate": 0.0005784528040382585, "loss": 4.6699, "step": 647 }, { "epoch": 0.74, "grad_norm": 1.7162728712097575, "learning_rate": 0.0005783700656663838, "loss": 4.5084, "step": 648 }, { "epoch": 0.74, "grad_norm": 2.151568007916394, "learning_rate": 0.000578287174686898, "loss": 4.4242, "step": 649 }, { "epoch": 0.74, "grad_norm": 1.0549135843405837, "learning_rate": 0.0005782041311452436, "loss": 4.2738, "step": 650 }, { "epoch": 0.74, "grad_norm": 2.0730888488878283, "learning_rate": 0.0005781209350869464, "loss": 4.5284, "step": 651 }, { "epoch": 0.75, "grad_norm": 1.4718274032693577, "learning_rate": 0.0005780375865576161, "loss": 4.3566, "step": 652 }, { "epoch": 0.75, "grad_norm": 1.0415134553124974, "learning_rate": 0.0005779540856029458, "loss": 4.4505, "step": 653 }, { 
"epoch": 0.75, "grad_norm": 2.4767026620698656, "learning_rate": 0.0005778704322687127, "loss": 4.5071, "step": 654 }, { "epoch": 0.75, "grad_norm": 1.5899963323412976, "learning_rate": 0.0005777866266007766, "loss": 4.4988, "step": 655 }, { "epoch": 0.75, "grad_norm": 4.411916827894673, "learning_rate": 0.0005777026686450816, "loss": 4.4039, "step": 656 }, { "epoch": 0.75, "grad_norm": 2.0378019425329286, "learning_rate": 0.0005776185584476548, "loss": 4.4763, "step": 657 }, { "epoch": 0.75, "grad_norm": 2.9649669942121437, "learning_rate": 0.0005775342960546071, "loss": 4.2827, "step": 658 }, { "epoch": 0.75, "grad_norm": 1.3561041938087637, "learning_rate": 0.0005774498815121327, "loss": 4.5832, "step": 659 }, { "epoch": 0.75, "grad_norm": 2.2945572331672914, "learning_rate": 0.0005773653148665093, "loss": 4.5525, "step": 660 }, { "epoch": 0.76, "grad_norm": 2.314855258008567, "learning_rate": 0.0005772805961640976, "loss": 4.6166, "step": 661 }, { "epoch": 0.76, "grad_norm": 2.5718702502026827, "learning_rate": 0.0005771957254513422, "loss": 4.5738, "step": 662 }, { "epoch": 0.76, "grad_norm": 2.41905268099026, "learning_rate": 0.0005771107027747708, "loss": 4.5421, "step": 663 }, { "epoch": 0.76, "grad_norm": 4.766471664055671, "learning_rate": 0.0005770255281809943, "loss": 4.5963, "step": 664 }, { "epoch": 0.76, "grad_norm": 6.299489263008332, "learning_rate": 0.000576940201716707, "loss": 4.5228, "step": 665 }, { "epoch": 0.76, "grad_norm": 1.1228439535348742, "learning_rate": 0.0005768547234286864, "loss": 4.6363, "step": 666 }, { "epoch": 0.76, "grad_norm": 5.6356197606505996, "learning_rate": 0.0005767690933637936, "loss": 4.5081, "step": 667 }, { "epoch": 0.76, "grad_norm": 1.9680715852537185, "learning_rate": 0.000576683311568972, "loss": 4.538, "step": 668 }, { "epoch": 0.76, "grad_norm": 2.888467494848188, "learning_rate": 0.0005765973780912493, "loss": 4.8447, "step": 669 }, { "epoch": 0.77, "grad_norm": 2.138442266066419, "learning_rate": 0.0005765112929777355, "loss": 4.4692, "step": 670 }, { "epoch": 0.77, "grad_norm": 1.6728757287027467, "learning_rate": 0.0005764250562756242, "loss": 4.2637, "step": 671 }, { "epoch": 0.77, "grad_norm": 3.3050192140866717, "learning_rate": 0.0005763386680321919, "loss": 4.6787, "step": 672 }, { "epoch": 0.77, "grad_norm": 2.5816127137993963, "learning_rate": 0.0005762521282947983, "loss": 4.4092, "step": 673 }, { "epoch": 0.77, "grad_norm": 2.353428573324596, "learning_rate": 0.0005761654371108858, "loss": 4.8245, "step": 674 }, { "epoch": 0.77, "grad_norm": 3.4605311576050526, "learning_rate": 0.0005760785945279805, "loss": 4.5476, "step": 675 }, { "epoch": 0.77, "grad_norm": 2.522482702851574, "learning_rate": 0.0005759916005936909, "loss": 4.5458, "step": 676 }, { "epoch": 0.77, "grad_norm": 1.9927883305313956, "learning_rate": 0.0005759044553557087, "loss": 4.4768, "step": 677 }, { "epoch": 0.77, "grad_norm": 1.3209147673977002, "learning_rate": 0.0005758171588618086, "loss": 4.3871, "step": 678 }, { "epoch": 0.78, "grad_norm": 1.9847510770126404, "learning_rate": 0.0005757297111598481, "loss": 4.567, "step": 679 }, { "epoch": 0.78, "grad_norm": 1.9235732603896354, "learning_rate": 0.0005756421122977676, "loss": 4.5036, "step": 680 }, { "epoch": 0.78, "grad_norm": 1.8214497724997958, "learning_rate": 0.0005755543623235905, "loss": 4.4471, "step": 681 }, { "epoch": 0.78, "grad_norm": 2.172296730948008, "learning_rate": 0.0005754664612854229, "loss": 4.4955, "step": 682 }, { "epoch": 0.78, "grad_norm": 1.7239723242151066, 
"learning_rate": 0.0005753784092314538, "loss": 4.6132, "step": 683 }, { "epoch": 0.78, "grad_norm": 1.865043112615227, "learning_rate": 0.0005752902062099548, "loss": 4.416, "step": 684 }, { "epoch": 0.78, "grad_norm": 0.9982553889175986, "learning_rate": 0.0005752018522692808, "loss": 4.3722, "step": 685 }, { "epoch": 0.78, "grad_norm": 1.491077400324958, "learning_rate": 0.0005751133474578686, "loss": 4.4652, "step": 686 }, { "epoch": 0.79, "grad_norm": 1.1018609800900525, "learning_rate": 0.0005750246918242387, "loss": 4.36, "step": 687 }, { "epoch": 0.79, "grad_norm": 2.8016664728383263, "learning_rate": 0.0005749358854169932, "loss": 4.6413, "step": 688 }, { "epoch": 0.79, "grad_norm": 1.154897814606344, "learning_rate": 0.0005748469282848178, "loss": 4.6886, "step": 689 }, { "epoch": 0.79, "grad_norm": 2.4299272344966587, "learning_rate": 0.0005747578204764802, "loss": 4.4376, "step": 690 }, { "epoch": 0.79, "grad_norm": 0.933390787553071, "learning_rate": 0.0005746685620408312, "loss": 4.4609, "step": 691 }, { "epoch": 0.79, "grad_norm": 0.9915442022259469, "learning_rate": 0.0005745791530268037, "loss": 4.5932, "step": 692 }, { "epoch": 0.79, "grad_norm": 1.6728162714686678, "learning_rate": 0.0005744895934834135, "loss": 4.6454, "step": 693 }, { "epoch": 0.79, "grad_norm": 1.1689286642146983, "learning_rate": 0.0005743998834597588, "loss": 4.4759, "step": 694 }, { "epoch": 0.79, "grad_norm": 2.6772331296898217, "learning_rate": 0.0005743100230050202, "loss": 4.4271, "step": 695 }, { "epoch": 0.8, "grad_norm": 0.8146385050830827, "learning_rate": 0.0005742200121684609, "loss": 4.5155, "step": 696 }, { "epoch": 0.8, "grad_norm": 2.5340900372134594, "learning_rate": 0.0005741298509994265, "loss": 4.4396, "step": 697 }, { "epoch": 0.8, "grad_norm": 0.997599201699876, "learning_rate": 0.000574039539547345, "loss": 4.4462, "step": 698 }, { "epoch": 0.8, "grad_norm": 2.970713037666637, "learning_rate": 0.0005739490778617269, "loss": 4.4837, "step": 699 }, { "epoch": 0.8, "grad_norm": 1.13766441551782, "learning_rate": 0.0005738584659921649, "loss": 4.4534, "step": 700 }, { "epoch": 0.8, "eval_blimp_filtered_avg": 0.553134328358209, "eval_blimp_filtered_std": 0.005455587933379622, "step": 700 }, { "epoch": 0.8, "eval_blimp_supplement_avg": 0.5344827586206896, "eval_blimp_supplement_std": 0.022812448704690593, "step": 700 }, { "epoch": 0.8, "eval_vqa_filtered_avg": 0.29, "eval_vqa_filtered_std": 0.04560480215720683, "step": 700 }, { "epoch": 0.8, "eval_winoground_filtered_avg": 0.54, "eval_winoground_filtered_std": 0.05009082659620333, "step": 700 }, { "epoch": 0.8, "grad_norm": 1.532099053450028, "learning_rate": 0.0005737677039883341, "loss": 4.387, "step": 701 }, { "epoch": 0.8, "grad_norm": 0.8371789292717418, "learning_rate": 0.0005736767918999918, "loss": 4.5827, "step": 702 }, { "epoch": 0.8, "grad_norm": 2.325423868832235, "learning_rate": 0.0005735857297769778, "loss": 4.3446, "step": 703 }, { "epoch": 0.8, "grad_norm": 1.9279613868833123, "learning_rate": 0.0005734945176692143, "loss": 4.3637, "step": 704 }, { "epoch": 0.81, "grad_norm": 1.345789880027642, "learning_rate": 0.000573403155626705, "loss": 4.3231, "step": 705 }, { "epoch": 0.81, "grad_norm": 0.891641206435242, "learning_rate": 0.0005733116436995363, "loss": 4.4166, "step": 706 }, { "epoch": 0.81, "grad_norm": 2.7603105244784145, "learning_rate": 0.0005732199819378772, "loss": 4.4378, "step": 707 }, { "epoch": 0.81, "grad_norm": 1.292011873698199, "learning_rate": 0.0005731281703919779, "loss": 4.3836, "step": 708 }, 
{ "epoch": 0.81, "grad_norm": 2.6493500158845205, "learning_rate": 0.0005730362091121712, "loss": 4.4418, "step": 709 }, { "epoch": 0.81, "grad_norm": 1.0544472718452804, "learning_rate": 0.0005729440981488723, "loss": 4.4469, "step": 710 }, { "epoch": 0.81, "grad_norm": 3.022547840216597, "learning_rate": 0.000572851837552578, "loss": 4.604, "step": 711 }, { "epoch": 0.81, "grad_norm": 1.2369711109434218, "learning_rate": 0.0005727594273738669, "loss": 4.3847, "step": 712 }, { "epoch": 0.81, "grad_norm": 2.297349688311103, "learning_rate": 0.0005726668676634005, "loss": 4.208, "step": 713 }, { "epoch": 0.82, "grad_norm": 1.405706658596966, "learning_rate": 0.0005725741584719216, "loss": 4.3919, "step": 714 }, { "epoch": 0.82, "grad_norm": 2.4577970425541227, "learning_rate": 0.0005724812998502548, "loss": 4.4645, "step": 715 }, { "epoch": 0.82, "grad_norm": 0.8851229245228893, "learning_rate": 0.0005723882918493073, "loss": 4.2623, "step": 716 }, { "epoch": 0.82, "grad_norm": 2.1703966323707973, "learning_rate": 0.0005722951345200674, "loss": 4.3944, "step": 717 }, { "epoch": 0.82, "grad_norm": 1.0292912458059562, "learning_rate": 0.000572201827913606, "loss": 4.3766, "step": 718 }, { "epoch": 0.82, "grad_norm": 2.0094306790503977, "learning_rate": 0.0005721083720810755, "loss": 4.5182, "step": 719 }, { "epoch": 0.82, "grad_norm": 1.1425603193754892, "learning_rate": 0.0005720147670737098, "loss": 4.5696, "step": 720 }, { "epoch": 0.82, "grad_norm": 1.3698665134131338, "learning_rate": 0.0005719210129428252, "loss": 4.0551, "step": 721 }, { "epoch": 0.83, "grad_norm": 1.8943244199712028, "learning_rate": 0.0005718271097398193, "loss": 4.4265, "step": 722 }, { "epoch": 0.83, "grad_norm": 2.394845270112468, "learning_rate": 0.0005717330575161717, "loss": 4.1211, "step": 723 }, { "epoch": 0.83, "grad_norm": 1.7801541279360003, "learning_rate": 0.0005716388563234434, "loss": 4.4524, "step": 724 }, { "epoch": 0.83, "grad_norm": 2.140343209570443, "learning_rate": 0.0005715445062132774, "loss": 4.4731, "step": 725 }, { "epoch": 0.83, "grad_norm": 1.1815931127587753, "learning_rate": 0.000571450007237398, "loss": 4.4272, "step": 726 }, { "epoch": 0.83, "grad_norm": 1.7310687566030418, "learning_rate": 0.0005713553594476114, "loss": 4.186, "step": 727 }, { "epoch": 0.83, "grad_norm": 1.2905058001831833, "learning_rate": 0.0005712605628958052, "loss": 4.4239, "step": 728 }, { "epoch": 0.83, "grad_norm": 3.2222497163663566, "learning_rate": 0.0005711656176339488, "loss": 4.512, "step": 729 }, { "epoch": 0.83, "grad_norm": 7.37575764752151, "learning_rate": 0.0005710705237140926, "loss": 4.4808, "step": 730 }, { "epoch": 0.84, "grad_norm": 5.943089443595224, "learning_rate": 0.0005709752811883693, "loss": 4.604, "step": 731 }, { "epoch": 0.84, "grad_norm": 3.6035895232186665, "learning_rate": 0.0005708798901089922, "loss": 4.3839, "step": 732 }, { "epoch": 0.84, "grad_norm": 9.848448183957649, "learning_rate": 0.0005707843505282566, "loss": 4.7996, "step": 733 }, { "epoch": 0.84, "grad_norm": 2.7634232222221975, "learning_rate": 0.0005706886624985392, "loss": 4.8332, "step": 734 }, { "epoch": 0.84, "grad_norm": 2.482064340844488, "learning_rate": 0.0005705928260722976, "loss": 5.0257, "step": 735 }, { "epoch": 0.84, "grad_norm": 2.651911902393482, "learning_rate": 0.0005704968413020715, "loss": 4.8522, "step": 736 }, { "epoch": 0.84, "grad_norm": 1.8238237841287386, "learning_rate": 0.0005704007082404812, "loss": 4.8846, "step": 737 }, { "epoch": 0.84, "grad_norm": 1.3715742405050868, 
"learning_rate": 0.0005703044269402288, "loss": 4.7145, "step": 738 }, { "epoch": 0.84, "grad_norm": 2.407277605814718, "learning_rate": 0.0005702079974540975, "loss": 4.6802, "step": 739 }, { "epoch": 0.85, "grad_norm": 3.0453486683081543, "learning_rate": 0.0005701114198349517, "loss": 4.8464, "step": 740 }, { "epoch": 0.85, "grad_norm": 2.724592874176681, "learning_rate": 0.0005700146941357369, "loss": 4.7677, "step": 741 }, { "epoch": 0.85, "grad_norm": 1.673521662411844, "learning_rate": 0.0005699178204094801, "loss": 4.68, "step": 742 }, { "epoch": 0.85, "grad_norm": 1.369231257596732, "learning_rate": 0.0005698207987092892, "loss": 4.6597, "step": 743 }, { "epoch": 0.85, "grad_norm": 2.416722707502322, "learning_rate": 0.0005697236290883533, "loss": 4.8169, "step": 744 }, { "epoch": 0.85, "grad_norm": 2.165726676004057, "learning_rate": 0.0005696263115999425, "loss": 4.6381, "step": 745 }, { "epoch": 0.85, "grad_norm": 1.6942329889251009, "learning_rate": 0.0005695288462974082, "loss": 4.4709, "step": 746 }, { "epoch": 0.85, "grad_norm": 2.015745170207427, "learning_rate": 0.0005694312332341824, "loss": 4.5296, "step": 747 }, { "epoch": 0.85, "grad_norm": 1.953013100441991, "learning_rate": 0.0005693334724637787, "loss": 4.5185, "step": 748 }, { "epoch": 0.86, "grad_norm": 2.114676833833503, "learning_rate": 0.0005692355640397911, "loss": 4.7051, "step": 749 }, { "epoch": 0.86, "grad_norm": 2.494260768573294, "learning_rate": 0.0005691375080158951, "loss": 4.3922, "step": 750 }, { "epoch": 0.86, "grad_norm": 3.5618183225144366, "learning_rate": 0.0005690393044458466, "loss": 4.6168, "step": 751 }, { "epoch": 0.86, "grad_norm": 3.019983992488592, "learning_rate": 0.0005689409533834825, "loss": 4.5672, "step": 752 }, { "epoch": 0.86, "grad_norm": 4.017533357070491, "learning_rate": 0.000568842454882721, "loss": 4.7495, "step": 753 }, { "epoch": 0.86, "grad_norm": 3.426341657146411, "learning_rate": 0.0005687438089975606, "loss": 4.5616, "step": 754 }, { "epoch": 0.86, "grad_norm": 1.475375144051202, "learning_rate": 0.0005686450157820808, "loss": 4.5466, "step": 755 }, { "epoch": 0.86, "grad_norm": 3.8075668304869077, "learning_rate": 0.0005685460752904419, "loss": 4.5189, "step": 756 }, { "epoch": 0.87, "grad_norm": 3.4745007306094204, "learning_rate": 0.000568446987576885, "loss": 4.5952, "step": 757 }, { "epoch": 0.87, "grad_norm": 1.3430123682395663, "learning_rate": 0.0005683477526957315, "loss": 4.4737, "step": 758 }, { "epoch": 0.87, "grad_norm": 3.3071858574030033, "learning_rate": 0.0005682483707013841, "loss": 4.6631, "step": 759 }, { "epoch": 0.87, "grad_norm": 2.129151518153412, "learning_rate": 0.0005681488416483259, "loss": 4.4614, "step": 760 }, { "epoch": 0.87, "grad_norm": 2.3496667672402167, "learning_rate": 0.0005680491655911202, "loss": 4.6929, "step": 761 }, { "epoch": 0.87, "grad_norm": 1.9927827409676904, "learning_rate": 0.0005679493425844116, "loss": 4.5659, "step": 762 }, { "epoch": 0.87, "grad_norm": 1.2389348480753737, "learning_rate": 0.0005678493726829247, "loss": 4.4976, "step": 763 }, { "epoch": 0.87, "grad_norm": 1.5569958291231294, "learning_rate": 0.000567749255941465, "loss": 4.6115, "step": 764 }, { "epoch": 0.87, "grad_norm": 1.9986881732505117, "learning_rate": 0.0005676489924149182, "loss": 4.62, "step": 765 }, { "epoch": 0.88, "grad_norm": 1.632447485757524, "learning_rate": 0.0005675485821582508, "loss": 4.666, "step": 766 }, { "epoch": 0.88, "grad_norm": 1.2834395674626455, "learning_rate": 0.0005674480252265094, "loss": 4.5658, "step": 767 
}, { "epoch": 0.88, "grad_norm": 1.8659965772429834, "learning_rate": 0.0005673473216748212, "loss": 4.4568, "step": 768 }, { "epoch": 0.88, "grad_norm": 1.7367966031530824, "learning_rate": 0.0005672464715583938, "loss": 4.4019, "step": 769 }, { "epoch": 0.88, "grad_norm": 1.0357248293931145, "learning_rate": 0.0005671454749325151, "loss": 4.3319, "step": 770 }, { "epoch": 0.88, "grad_norm": 1.4118371068199673, "learning_rate": 0.0005670443318525532, "loss": 4.5843, "step": 771 }, { "epoch": 0.88, "grad_norm": 1.6320678035539897, "learning_rate": 0.0005669430423739568, "loss": 4.3038, "step": 772 }, { "epoch": 0.88, "grad_norm": 2.858698792622335, "learning_rate": 0.0005668416065522544, "loss": 4.2895, "step": 773 }, { "epoch": 0.88, "grad_norm": 1.829095201647142, "learning_rate": 0.0005667400244430551, "loss": 4.6098, "step": 774 }, { "epoch": 0.89, "grad_norm": 4.045990691717911, "learning_rate": 0.0005666382961020483, "loss": 4.4029, "step": 775 }, { "epoch": 0.89, "grad_norm": 1.8357215650205188, "learning_rate": 0.0005665364215850031, "loss": 4.4692, "step": 776 }, { "epoch": 0.89, "grad_norm": 1.4516546372915655, "learning_rate": 0.0005664344009477691, "loss": 4.3512, "step": 777 }, { "epoch": 0.89, "grad_norm": 1.1930442381589221, "learning_rate": 0.0005663322342462758, "loss": 4.5269, "step": 778 }, { "epoch": 0.89, "grad_norm": 1.692403511162347, "learning_rate": 0.0005662299215365331, "loss": 4.3754, "step": 779 }, { "epoch": 0.89, "grad_norm": 1.500655486702712, "learning_rate": 0.0005661274628746304, "loss": 4.4719, "step": 780 }, { "epoch": 0.89, "grad_norm": 1.4448450648992466, "learning_rate": 0.0005660248583167377, "loss": 4.7169, "step": 781 }, { "epoch": 0.89, "grad_norm": 1.0091761476074026, "learning_rate": 0.0005659221079191045, "loss": 4.5633, "step": 782 }, { "epoch": 0.89, "grad_norm": 1.925721765807762, "learning_rate": 0.0005658192117380606, "loss": 4.5922, "step": 783 }, { "epoch": 0.9, "grad_norm": 0.967872080142002, "learning_rate": 0.0005657161698300156, "loss": 4.5185, "step": 784 }, { "epoch": 0.9, "grad_norm": 1.3250771765072809, "learning_rate": 0.0005656129822514589, "loss": 4.3179, "step": 785 }, { "epoch": 0.9, "grad_norm": 1.2605820311888443, "learning_rate": 0.0005655096490589599, "loss": 4.2923, "step": 786 }, { "epoch": 0.9, "grad_norm": 1.6723937109250728, "learning_rate": 0.0005654061703091677, "loss": 4.1635, "step": 787 }, { "epoch": 0.9, "grad_norm": 1.1794030652620224, "learning_rate": 0.0005653025460588113, "loss": 4.3573, "step": 788 }, { "epoch": 0.9, "grad_norm": 1.4623138738350192, "learning_rate": 0.0005651987763646994, "loss": 4.4752, "step": 789 }, { "epoch": 0.9, "grad_norm": 2.0033312218999346, "learning_rate": 0.0005650948612837205, "loss": 4.5783, "step": 790 }, { "epoch": 0.9, "grad_norm": 1.622095944806616, "learning_rate": 0.0005649908008728429, "loss": 4.124, "step": 791 }, { "epoch": 0.91, "grad_norm": 3.4223052056056416, "learning_rate": 0.0005648865951891142, "loss": 4.3099, "step": 792 }, { "epoch": 0.91, "grad_norm": 3.3198590554655696, "learning_rate": 0.0005647822442896619, "loss": 4.4748, "step": 793 }, { "epoch": 0.91, "grad_norm": 4.783637270349249, "learning_rate": 0.000564677748231693, "loss": 4.4141, "step": 794 }, { "epoch": 0.91, "grad_norm": 1.940965363632524, "learning_rate": 0.0005645731070724947, "loss": 4.3795, "step": 795 }, { "epoch": 0.91, "grad_norm": 6.733091750642147, "learning_rate": 0.0005644683208694326, "loss": 4.514, "step": 796 }, { "epoch": 0.91, "grad_norm": 2.0683662670866614, 
"learning_rate": 0.0005643633896799528, "loss": 4.5301, "step": 797 }, { "epoch": 0.91, "grad_norm": 2.6429508219839413, "learning_rate": 0.0005642583135615803, "loss": 4.4977, "step": 798 }, { "epoch": 0.91, "grad_norm": 1.4669385770213428, "learning_rate": 0.00056415309257192, "loss": 4.5343, "step": 799 }, { "epoch": 0.91, "grad_norm": 1.5140112360435332, "learning_rate": 0.0005640477267686557, "loss": 4.47, "step": 800 }, { "epoch": 0.91, "eval_blimp_filtered_avg": 0.5392537313432836, "eval_blimp_filtered_std": 0.0055315890606076785, "step": 800 }, { "epoch": 0.91, "eval_blimp_supplement_avg": 0.5301724137931034, "eval_blimp_supplement_std": 0.02262080566474024, "step": 800 }, { "epoch": 0.91, "eval_vqa_filtered_avg": 0.17, "eval_vqa_filtered_std": 0.03775251680686371, "step": 800 }, { "epoch": 0.91, "eval_winoground_filtered_avg": 0.49, "eval_winoground_filtered_std": 0.05024183937956911, "step": 800 }, { "epoch": 0.92, "grad_norm": 3.5179163214378097, "learning_rate": 0.0005639422162095511, "loss": 4.1751, "step": 801 }, { "epoch": 0.92, "grad_norm": 2.6077674764452516, "learning_rate": 0.0005638365609524491, "loss": 4.3241, "step": 802 }, { "epoch": 0.92, "grad_norm": 2.6254048476525464, "learning_rate": 0.0005637307610552715, "loss": 4.3363, "step": 803 }, { "epoch": 0.92, "grad_norm": 2.6223143245718377, "learning_rate": 0.0005636248165760201, "loss": 4.3163, "step": 804 }, { "epoch": 0.92, "grad_norm": 2.946814217523914, "learning_rate": 0.0005635187275727756, "loss": 4.334, "step": 805 }, { "epoch": 0.92, "grad_norm": 3.851760967405565, "learning_rate": 0.0005634124941036977, "loss": 4.3922, "step": 806 }, { "epoch": 0.92, "grad_norm": 1.9260711573095337, "learning_rate": 0.0005633061162270258, "loss": 4.2261, "step": 807 }, { "epoch": 0.92, "grad_norm": 3.0713108268202687, "learning_rate": 0.0005631995940010782, "loss": 4.6197, "step": 808 }, { "epoch": 0.92, "grad_norm": 2.4059502725241124, "learning_rate": 0.0005630929274842521, "loss": 4.3439, "step": 809 }, { "epoch": 0.93, "grad_norm": 2.410281311586372, "learning_rate": 0.0005629861167350243, "loss": 4.4831, "step": 810 }, { "epoch": 0.93, "grad_norm": 2.7378744370954116, "learning_rate": 0.0005628791618119505, "loss": 4.1798, "step": 811 }, { "epoch": 0.93, "grad_norm": 1.2370648409165534, "learning_rate": 0.0005627720627736651, "loss": 4.4679, "step": 812 }, { "epoch": 0.93, "grad_norm": 1.667516410670082, "learning_rate": 0.0005626648196788819, "loss": 4.398, "step": 813 }, { "epoch": 0.93, "grad_norm": 1.6302774429509728, "learning_rate": 0.0005625574325863936, "loss": 4.4626, "step": 814 }, { "epoch": 0.93, "grad_norm": 1.721795133982151, "learning_rate": 0.0005624499015550716, "loss": 4.4726, "step": 815 }, { "epoch": 0.93, "grad_norm": 1.759611590186062, "learning_rate": 0.0005623422266438666, "loss": 4.2706, "step": 816 }, { "epoch": 0.93, "grad_norm": 1.5953318563213739, "learning_rate": 0.000562234407911808, "loss": 4.4214, "step": 817 }, { "epoch": 0.93, "grad_norm": 1.5267615564582564, "learning_rate": 0.0005621264454180037, "loss": 4.5114, "step": 818 }, { "epoch": 0.94, "grad_norm": 2.369872503239026, "learning_rate": 0.0005620183392216412, "loss": 4.1582, "step": 819 }, { "epoch": 0.94, "grad_norm": 1.2436422548990567, "learning_rate": 0.0005619100893819859, "loss": 4.3708, "step": 820 }, { "epoch": 0.94, "grad_norm": 2.826957117750034, "learning_rate": 0.0005618016959583825, "loss": 4.456, "step": 821 }, { "epoch": 0.94, "grad_norm": 2.0012567327677417, "learning_rate": 0.0005616931590102544, "loss": 
4.5132, "step": 822 }, { "epoch": 0.94, "grad_norm": 3.907955608971358, "learning_rate": 0.0005615844785971035, "loss": 4.3166, "step": 823 }, { "epoch": 0.94, "grad_norm": 2.803442390900075, "learning_rate": 0.0005614756547785102, "loss": 4.303, "step": 824 }, { "epoch": 0.94, "grad_norm": 3.4337066317283753, "learning_rate": 0.0005613666876141341, "loss": 4.6722, "step": 825 }, { "epoch": 0.94, "grad_norm": 4.852406717139007, "learning_rate": 0.0005612575771637127, "loss": 4.3167, "step": 826 }, { "epoch": 0.95, "grad_norm": 1.8213680353396644, "learning_rate": 0.0005611483234870625, "loss": 4.3684, "step": 827 }, { "epoch": 0.95, "grad_norm": 1.8623478496037194, "learning_rate": 0.0005610389266440783, "loss": 4.4812, "step": 828 }, { "epoch": 0.95, "grad_norm": 2.2353003239424747, "learning_rate": 0.0005609293866947336, "loss": 4.4897, "step": 829 }, { "epoch": 0.95, "grad_norm": 2.964179452287191, "learning_rate": 0.0005608197036990802, "loss": 4.6249, "step": 830 }, { "epoch": 0.95, "grad_norm": 2.4640227261806573, "learning_rate": 0.0005607098777172482, "loss": 4.3987, "step": 831 }, { "epoch": 0.95, "grad_norm": 3.218959984083028, "learning_rate": 0.0005605999088094464, "loss": 4.6089, "step": 832 }, { "epoch": 0.95, "grad_norm": 18.624015607241194, "learning_rate": 0.0005604897970359617, "loss": 4.4225, "step": 833 }, { "epoch": 0.95, "grad_norm": 1.594156337694963, "learning_rate": 0.0005603795424571593, "loss": 4.4438, "step": 834 }, { "epoch": 0.95, "grad_norm": 2.9215600128270105, "learning_rate": 0.0005602691451334831, "loss": 4.658, "step": 835 }, { "epoch": 0.96, "grad_norm": 3.5097119339315124, "learning_rate": 0.0005601586051254548, "loss": 4.6479, "step": 836 }, { "epoch": 0.96, "grad_norm": 2.5698245992094395, "learning_rate": 0.0005600479224936743, "loss": 4.5435, "step": 837 }, { "epoch": 0.96, "grad_norm": 1.470065444373569, "learning_rate": 0.00055993709729882, "loss": 4.6083, "step": 838 }, { "epoch": 0.96, "grad_norm": 2.546118390346671, "learning_rate": 0.0005598261296016483, "loss": 4.4647, "step": 839 }, { "epoch": 0.96, "grad_norm": 4.319978301056108, "learning_rate": 0.000559715019462994, "loss": 4.609, "step": 840 }, { "epoch": 0.96, "grad_norm": 2.1188781892482322, "learning_rate": 0.0005596037669437692, "loss": 4.301, "step": 841 }, { "epoch": 0.96, "grad_norm": 2.4374142383099775, "learning_rate": 0.0005594923721049651, "loss": 4.2956, "step": 842 }, { "epoch": 0.96, "grad_norm": 16.332017027734327, "learning_rate": 0.0005593808350076501, "loss": 4.5892, "step": 843 }, { "epoch": 0.96, "grad_norm": 2.1534559813597, "learning_rate": 0.000559269155712971, "loss": 4.4758, "step": 844 }, { "epoch": 0.97, "grad_norm": 2.0327205284902234, "learning_rate": 0.0005591573342821524, "loss": 4.8934, "step": 845 }, { "epoch": 0.97, "grad_norm": 2.305301294735738, "learning_rate": 0.000559045370776497, "loss": 4.6047, "step": 846 }, { "epoch": 0.97, "grad_norm": 1.4493363946470963, "learning_rate": 0.000558933265257385, "loss": 4.6245, "step": 847 }, { "epoch": 0.97, "grad_norm": 1.5799028275658689, "learning_rate": 0.0005588210177862751, "loss": 4.5984, "step": 848 }, { "epoch": 0.97, "grad_norm": 1.4887415794623957, "learning_rate": 0.0005587086284247031, "loss": 4.7335, "step": 849 }, { "epoch": 0.97, "grad_norm": 1.7259266727108415, "learning_rate": 0.0005585960972342831, "loss": 4.6991, "step": 850 }, { "epoch": 0.97, "grad_norm": 1.5623247023516345, "learning_rate": 0.0005584834242767067, "loss": 4.4171, "step": 851 }, { "epoch": 0.97, "grad_norm": 
1.2758342697998015, "learning_rate": 0.0005583706096137433, "loss": 4.2991, "step": 852 }, { "epoch": 0.97, "grad_norm": 7.35947113452101, "learning_rate": 0.0005582576533072401, "loss": 4.44, "step": 853 }, { "epoch": 0.98, "grad_norm": 2.9811034866921764, "learning_rate": 0.0005581445554191217, "loss": 4.7852, "step": 854 }, { "epoch": 0.98, "grad_norm": 3.808423969727198, "learning_rate": 0.0005580313160113904, "loss": 4.7576, "step": 855 }, { "epoch": 0.98, "grad_norm": 2.1381114754339956, "learning_rate": 0.0005579179351461263, "loss": 4.7348, "step": 856 }, { "epoch": 0.98, "grad_norm": 3.12222792436702, "learning_rate": 0.0005578044128854868, "loss": 4.784, "step": 857 }, { "epoch": 0.98, "grad_norm": 3.770492013846354, "learning_rate": 0.0005576907492917069, "loss": 4.6636, "step": 858 }, { "epoch": 0.98, "grad_norm": 2.644363299562771, "learning_rate": 0.0005575769444270991, "loss": 4.7517, "step": 859 }, { "epoch": 0.98, "grad_norm": 20.397409079885954, "learning_rate": 0.0005574629983540534, "loss": 4.9617, "step": 860 }, { "epoch": 0.98, "grad_norm": 4.305933257746408, "learning_rate": 0.0005573489111350369, "loss": 4.9096, "step": 861 }, { "epoch": 0.99, "grad_norm": 2.3533563249940763, "learning_rate": 0.0005572346828325946, "loss": 4.7782, "step": 862 }, { "epoch": 0.99, "grad_norm": 2.2372069206452614, "learning_rate": 0.0005571203135093484, "loss": 4.7903, "step": 863 }, { "epoch": 0.99, "grad_norm": 2.3236152828187104, "learning_rate": 0.0005570058032279977, "loss": 4.7257, "step": 864 }, { "epoch": 0.99, "grad_norm": 2.782334251720394, "learning_rate": 0.0005568911520513192, "loss": 4.749, "step": 865 }, { "epoch": 0.99, "grad_norm": 2.204238082188978, "learning_rate": 0.0005567763600421668, "loss": 4.6186, "step": 866 }, { "epoch": 0.99, "grad_norm": 3.421970105666145, "learning_rate": 0.0005566614272634715, "loss": 4.8092, "step": 867 }, { "epoch": 0.99, "grad_norm": 2.152511146372104, "learning_rate": 0.0005565463537782417, "loss": 4.408, "step": 868 }, { "epoch": 0.99, "grad_norm": 3.061902487254796, "learning_rate": 0.0005564311396495628, "loss": 4.7927, "step": 869 }, { "epoch": 0.99, "grad_norm": 2.000914706693894, "learning_rate": 0.0005563157849405972, "loss": 4.8562, "step": 870 }, { "epoch": 1.0, "grad_norm": 1.935337558021705, "learning_rate": 0.0005562002897145843, "loss": 4.8541, "step": 871 }, { "epoch": 1.0, "grad_norm": 2.0460604942973273, "learning_rate": 0.0005560846540348412, "loss": 4.5184, "step": 872 }, { "epoch": 1.0, "grad_norm": 5.514531003814646, "learning_rate": 0.0005559688779647613, "loss": 4.6261, "step": 873 }, { "epoch": 1.0, "grad_norm": 1.355242705862316, "learning_rate": 0.0005558529615678151, "loss": 4.5235, "step": 874 }, { "epoch": 1.0, "grad_norm": 1.5685983195498576, "learning_rate": 0.0005557369049075501, "loss": 4.543, "step": 875 }, { "epoch": 1.0, "grad_norm": 1.1305602246095083, "learning_rate": 0.0005556207080475908, "loss": 4.7276, "step": 876 }, { "epoch": 1.0, "grad_norm": 2.812624023379587, "learning_rate": 0.0005555043710516385, "loss": 4.3129, "step": 877 }, { "epoch": 1.0, "grad_norm": 2.8368991001962853, "learning_rate": 0.0005553878939834711, "loss": 4.8652, "step": 878 }, { "epoch": 1.0, "grad_norm": 1.675058269469499, "learning_rate": 0.0005552712769069436, "loss": 4.6468, "step": 879 }, { "epoch": 1.01, "grad_norm": 1.940217263690691, "learning_rate": 0.0005551545198859878, "loss": 4.5284, "step": 880 }, { "epoch": 1.01, "grad_norm": 1.3380200472098052, "learning_rate": 0.0005550376229846118, "loss": 4.5162, 
"step": 881 }, { "epoch": 1.01, "grad_norm": 1.7943092863661791, "learning_rate": 0.0005549205862669007, "loss": 4.5313, "step": 882 }, { "epoch": 1.01, "grad_norm": 1.3517825814193654, "learning_rate": 0.0005548034097970162, "loss": 4.4875, "step": 883 }, { "epoch": 1.01, "grad_norm": 3.5046003994965984, "learning_rate": 0.0005546860936391967, "loss": 4.4137, "step": 884 }, { "epoch": 1.01, "grad_norm": 2.1779626601970765, "learning_rate": 0.0005545686378577569, "loss": 4.5479, "step": 885 }, { "epoch": 1.01, "grad_norm": 1.7561828405029052, "learning_rate": 0.0005544510425170885, "loss": 4.3525, "step": 886 }, { "epoch": 1.01, "grad_norm": 1.2917986123962655, "learning_rate": 0.0005543333076816592, "loss": 4.8366, "step": 887 }, { "epoch": 1.01, "grad_norm": 1.8030060265537868, "learning_rate": 0.0005542154334160134, "loss": 4.4555, "step": 888 }, { "epoch": 1.02, "grad_norm": 6.045466852042441, "learning_rate": 0.0005540974197847722, "loss": 4.5348, "step": 889 }, { "epoch": 1.02, "grad_norm": 1.9138919386184405, "learning_rate": 0.0005539792668526326, "loss": 4.5398, "step": 890 }, { "epoch": 1.02, "grad_norm": 2.1354413532255196, "learning_rate": 0.0005538609746843682, "loss": 4.434, "step": 891 }, { "epoch": 1.02, "grad_norm": 1.2962485242921866, "learning_rate": 0.0005537425433448293, "loss": 4.7233, "step": 892 }, { "epoch": 1.02, "grad_norm": 2.702640807653589, "learning_rate": 0.0005536239728989417, "loss": 4.4054, "step": 893 }, { "epoch": 1.02, "grad_norm": 2.259799544711849, "learning_rate": 0.0005535052634117082, "loss": 4.5594, "step": 894 }, { "epoch": 1.02, "grad_norm": 3.49951362474867, "learning_rate": 0.0005533864149482074, "loss": 4.5235, "step": 895 }, { "epoch": 1.02, "grad_norm": 1.7013173924549674, "learning_rate": 0.0005532674275735943, "loss": 4.5331, "step": 896 }, { "epoch": 1.03, "grad_norm": 6.659914839210145, "learning_rate": 0.0005531483013531001, "loss": 4.3455, "step": 897 }, { "epoch": 1.03, "grad_norm": 3.7170247295286347, "learning_rate": 0.0005530290363520316, "loss": 4.7108, "step": 898 }, { "epoch": 1.03, "grad_norm": 3.3659989270849913, "learning_rate": 0.0005529096326357724, "loss": 4.3952, "step": 899 }, { "epoch": 1.03, "grad_norm": 2.3029658549539516, "learning_rate": 0.0005527900902697818, "loss": 4.4337, "step": 900 }, { "epoch": 1.03, "eval_blimp_filtered_avg": 0.5513432835820895, "eval_blimp_filtered_std": 0.005543231209120804, "step": 900 }, { "epoch": 1.03, "eval_blimp_supplement_avg": 0.5193965517241379, "eval_blimp_supplement_std": 0.02223243229499718, "step": 900 }, { "epoch": 1.03, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 900 }, { "epoch": 1.03, "eval_winoground_filtered_avg": 0.48, "eval_winoground_filtered_std": 0.05021167315686779, "step": 900 }, { "epoch": 1.03, "grad_norm": 30.208975867059706, "learning_rate": 0.000552670409319595, "loss": 4.7352, "step": 901 }, { "epoch": 1.03, "grad_norm": 5.168300838753353, "learning_rate": 0.0005525505898508234, "loss": 5.5759, "step": 902 }, { "epoch": 1.03, "grad_norm": 1.8935145281537624, "learning_rate": 0.0005524306319291543, "loss": 5.2075, "step": 903 }, { "epoch": 1.03, "grad_norm": 2.5855244713279215, "learning_rate": 0.0005523105356203507, "loss": 5.3973, "step": 904 }, { "epoch": 1.03, "grad_norm": 2.573845891636085, "learning_rate": 0.0005521903009902516, "loss": 5.4294, "step": 905 }, { "epoch": 1.04, "grad_norm": 2.6506058681087703, "learning_rate": 0.0005520699281047722, "loss": 5.3886, "step": 906 }, { "epoch": 1.04, "grad_norm": 
6.083833002217966, "learning_rate": 0.0005519494170299026, "loss": 5.1987, "step": 907 }, { "epoch": 1.04, "grad_norm": 2.337458107345143, "learning_rate": 0.0005518287678317096, "loss": 5.1824, "step": 908 }, { "epoch": 1.04, "grad_norm": 138.70356979361264, "learning_rate": 0.0005517079805763352, "loss": 5.2551, "step": 909 }, { "epoch": 1.04, "grad_norm": 1.585243764953989, "learning_rate": 0.0005515870553299971, "loss": 5.0877, "step": 910 }, { "epoch": 1.04, "grad_norm": 8.135674924077264, "learning_rate": 0.000551465992158989, "loss": 5.5794, "step": 911 }, { "epoch": 1.04, "grad_norm": 3.389015482378525, "learning_rate": 0.0005513447911296796, "loss": 6.0232, "step": 912 }, { "epoch": 1.04, "grad_norm": 1.849568905152137, "learning_rate": 0.0005512234523085139, "loss": 5.8414, "step": 913 }, { "epoch": 1.04, "grad_norm": 0.9990656949482055, "learning_rate": 0.0005511019757620119, "loss": 5.8587, "step": 914 }, { "epoch": 1.05, "grad_norm": 1.345569885191008, "learning_rate": 0.0005509803615567693, "loss": 5.9227, "step": 915 }, { "epoch": 1.05, "grad_norm": 1.5618952641877304, "learning_rate": 0.0005508586097594573, "loss": 6.0792, "step": 916 }, { "epoch": 1.05, "grad_norm": 1.3158770175624521, "learning_rate": 0.0005507367204368225, "loss": 5.7001, "step": 917 }, { "epoch": 1.05, "grad_norm": 1.1731073465162067, "learning_rate": 0.0005506146936556868, "loss": 5.9324, "step": 918 }, { "epoch": 1.05, "grad_norm": 0.9540339883718265, "learning_rate": 0.0005504925294829476, "loss": 5.6842, "step": 919 }, { "epoch": 1.05, "grad_norm": 1.430961627461695, "learning_rate": 0.0005503702279855775, "loss": 5.7338, "step": 920 }, { "epoch": 1.05, "grad_norm": 1.6821953616921737, "learning_rate": 0.0005502477892306244, "loss": 5.6171, "step": 921 }, { "epoch": 1.05, "grad_norm": 0.7754536364008806, "learning_rate": 0.0005501252132852117, "loss": 5.5628, "step": 922 }, { "epoch": 1.05, "grad_norm": 0.9602961477618711, "learning_rate": 0.0005500025002165375, "loss": 5.6171, "step": 923 }, { "epoch": 1.06, "grad_norm": 0.9916363444788016, "learning_rate": 0.0005498796500918757, "loss": 5.5605, "step": 924 }, { "epoch": 1.06, "grad_norm": 1.2721100564937624, "learning_rate": 0.0005497566629785747, "loss": 5.5382, "step": 925 }, { "epoch": 1.06, "grad_norm": 0.779774774092262, "learning_rate": 0.0005496335389440584, "loss": 5.4036, "step": 926 }, { "epoch": 1.06, "grad_norm": 1.189913105183614, "learning_rate": 0.0005495102780558258, "loss": 5.4056, "step": 927 }, { "epoch": 1.06, "grad_norm": 1.0134822929320961, "learning_rate": 0.0005493868803814507, "loss": 5.3807, "step": 928 }, { "epoch": 1.06, "grad_norm": 0.8704819303064539, "learning_rate": 0.000549263345988582, "loss": 5.5341, "step": 929 }, { "epoch": 1.06, "grad_norm": 0.8059298190144354, "learning_rate": 0.0005491396749449435, "loss": 5.4374, "step": 930 }, { "epoch": 1.06, "grad_norm": 0.6904943204010585, "learning_rate": 0.0005490158673183339, "loss": 5.4564, "step": 931 }, { "epoch": 1.07, "grad_norm": 0.5802006136391343, "learning_rate": 0.000548891923176627, "loss": 5.5058, "step": 932 }, { "epoch": 1.07, "grad_norm": 0.5640010178765102, "learning_rate": 0.0005487678425877711, "loss": 5.5365, "step": 933 }, { "epoch": 1.07, "grad_norm": 0.6536527164883787, "learning_rate": 0.0005486436256197896, "loss": 5.3437, "step": 934 }, { "epoch": 1.07, "grad_norm": 0.6812923461974892, "learning_rate": 0.0005485192723407804, "loss": 5.5345, "step": 935 }, { "epoch": 1.07, "grad_norm": 0.5803889600092739, "learning_rate": 
0.0005483947828189164, "loss": 5.359, "step": 936 }, { "epoch": 1.07, "grad_norm": 0.5620865526080171, "learning_rate": 0.0005482701571224449, "loss": 5.4258, "step": 937 }, { "epoch": 1.07, "grad_norm": 0.5665152600534319, "learning_rate": 0.0005481453953196881, "loss": 5.4097, "step": 938 }, { "epoch": 1.07, "grad_norm": 0.4554381218894331, "learning_rate": 0.0005480204974790429, "loss": 5.4437, "step": 939 }, { "epoch": 1.07, "grad_norm": 0.4535771384583137, "learning_rate": 0.0005478954636689802, "loss": 5.5134, "step": 940 }, { "epoch": 1.08, "grad_norm": 0.5198822494198634, "learning_rate": 0.0005477702939580462, "loss": 5.4077, "step": 941 }, { "epoch": 1.08, "grad_norm": 0.6286392732786166, "learning_rate": 0.0005476449884148612, "loss": 5.5844, "step": 942 }, { "epoch": 1.08, "grad_norm": 0.538645897339748, "learning_rate": 0.0005475195471081198, "loss": 5.2842, "step": 943 }, { "epoch": 1.08, "grad_norm": 0.4448483241361265, "learning_rate": 0.0005473939701065914, "loss": 5.4075, "step": 944 }, { "epoch": 1.08, "grad_norm": 0.4928477362971769, "learning_rate": 0.0005472682574791197, "loss": 5.3707, "step": 945 }, { "epoch": 1.08, "grad_norm": 0.44884727838729455, "learning_rate": 0.0005471424092946224, "loss": 5.4251, "step": 946 }, { "epoch": 1.08, "grad_norm": 0.45929253838373363, "learning_rate": 0.0005470164256220921, "loss": 5.4952, "step": 947 }, { "epoch": 1.08, "grad_norm": 0.4236419429024913, "learning_rate": 0.0005468903065305954, "loss": 5.4036, "step": 948 }, { "epoch": 1.08, "grad_norm": 0.5233565487235446, "learning_rate": 0.0005467640520892727, "loss": 5.3376, "step": 949 }, { "epoch": 1.09, "grad_norm": 0.4329616277140555, "learning_rate": 0.0005466376623673395, "loss": 5.283, "step": 950 }, { "epoch": 1.09, "grad_norm": 0.3667327889508278, "learning_rate": 0.0005465111374340847, "loss": 5.3586, "step": 951 }, { "epoch": 1.09, "grad_norm": 0.4328317087537683, "learning_rate": 0.0005463844773588716, "loss": 5.461, "step": 952 }, { "epoch": 1.09, "grad_norm": 0.46983453049818547, "learning_rate": 0.0005462576822111378, "loss": 5.324, "step": 953 }, { "epoch": 1.09, "grad_norm": 0.4779465029832866, "learning_rate": 0.0005461307520603946, "loss": 5.3534, "step": 954 }, { "epoch": 1.09, "grad_norm": 0.5113433793692967, "learning_rate": 0.0005460036869762274, "loss": 5.5193, "step": 955 }, { "epoch": 1.09, "grad_norm": 0.39547383853989404, "learning_rate": 0.0005458764870282957, "loss": 5.4714, "step": 956 }, { "epoch": 1.09, "grad_norm": 0.49252234865264993, "learning_rate": 0.000545749152286333, "loss": 5.3798, "step": 957 }, { "epoch": 1.09, "grad_norm": 0.38514045882203557, "learning_rate": 0.0005456216828201463, "loss": 5.4192, "step": 958 }, { "epoch": 1.1, "grad_norm": 0.4382990119053008, "learning_rate": 0.0005454940786996167, "loss": 5.5529, "step": 959 }, { "epoch": 1.1, "grad_norm": 0.32111762981057274, "learning_rate": 0.0005453663399946994, "loss": 5.3956, "step": 960 }, { "epoch": 1.1, "grad_norm": 0.47889088074544656, "learning_rate": 0.000545238466775423, "loss": 5.5267, "step": 961 }, { "epoch": 1.1, "grad_norm": 0.4342888361795797, "learning_rate": 0.0005451104591118899, "loss": 5.2948, "step": 962 }, { "epoch": 1.1, "grad_norm": 0.43507834937912576, "learning_rate": 0.0005449823170742763, "loss": 5.3297, "step": 963 }, { "epoch": 1.1, "grad_norm": 0.4691778214607147, "learning_rate": 0.000544854040732832, "loss": 5.342, "step": 964 }, { "epoch": 1.1, "grad_norm": 0.42373811765038427, "learning_rate": 0.0005447256301578807, "loss": 5.3019, "step": 965 
}, { "epoch": 1.1, "grad_norm": 0.47602448370485123, "learning_rate": 0.000544597085419819, "loss": 5.4233, "step": 966 }, { "epoch": 1.11, "grad_norm": 0.4381774569483418, "learning_rate": 0.0005444684065891178, "loss": 5.3006, "step": 967 }, { "epoch": 1.11, "grad_norm": 0.5762417968798564, "learning_rate": 0.0005443395937363213, "loss": 5.2971, "step": 968 }, { "epoch": 1.11, "grad_norm": 0.38509568448221154, "learning_rate": 0.0005442106469320467, "loss": 5.4138, "step": 969 }, { "epoch": 1.11, "grad_norm": 0.39923840991046794, "learning_rate": 0.0005440815662469855, "loss": 5.3991, "step": 970 }, { "epoch": 1.11, "grad_norm": 0.4040077413261883, "learning_rate": 0.0005439523517519017, "loss": 5.4306, "step": 971 }, { "epoch": 1.11, "grad_norm": 0.49684736549162845, "learning_rate": 0.0005438230035176333, "loss": 5.2718, "step": 972 }, { "epoch": 1.11, "grad_norm": 0.39279382434163546, "learning_rate": 0.0005436935216150913, "loss": 5.3615, "step": 973 }, { "epoch": 1.11, "grad_norm": 0.4561845712971142, "learning_rate": 0.0005435639061152602, "loss": 5.3471, "step": 974 }, { "epoch": 1.11, "grad_norm": 0.4142674903649662, "learning_rate": 0.0005434341570891975, "loss": 5.2826, "step": 975 }, { "epoch": 1.12, "grad_norm": 0.3966488199536707, "learning_rate": 0.000543304274608034, "loss": 5.222, "step": 976 }, { "epoch": 1.12, "grad_norm": 0.3875496741056192, "learning_rate": 0.0005431742587429737, "loss": 5.373, "step": 977 }, { "epoch": 1.12, "grad_norm": 0.37522111258147706, "learning_rate": 0.0005430441095652938, "loss": 5.3349, "step": 978 }, { "epoch": 1.12, "grad_norm": 0.40874829397803397, "learning_rate": 0.0005429138271463443, "loss": 5.4291, "step": 979 }, { "epoch": 1.12, "grad_norm": 0.4002631809269874, "learning_rate": 0.0005427834115575485, "loss": 5.1663, "step": 980 }, { "epoch": 1.12, "grad_norm": 0.41485118376581664, "learning_rate": 0.0005426528628704026, "loss": 5.292, "step": 981 }, { "epoch": 1.12, "grad_norm": 0.4224485995069427, "learning_rate": 0.000542522181156476, "loss": 5.3373, "step": 982 }, { "epoch": 1.12, "grad_norm": 0.5036998860052495, "learning_rate": 0.0005423913664874105, "loss": 5.1369, "step": 983 }, { "epoch": 1.12, "grad_norm": 0.6911664850605997, "learning_rate": 0.0005422604189349215, "loss": 5.0799, "step": 984 }, { "epoch": 1.13, "grad_norm": 0.39056563564232344, "learning_rate": 0.0005421293385707966, "loss": 5.0372, "step": 985 }, { "epoch": 1.13, "grad_norm": 0.440272074419684, "learning_rate": 0.0005419981254668966, "loss": 5.1056, "step": 986 }, { "epoch": 1.13, "grad_norm": 0.5453022966537585, "learning_rate": 0.0005418667796951547, "loss": 5.0368, "step": 987 }, { "epoch": 1.13, "grad_norm": 0.7381615796782015, "learning_rate": 0.0005417353013275774, "loss": 5.283, "step": 988 }, { "epoch": 1.13, "grad_norm": 0.4871024955019236, "learning_rate": 0.0005416036904362434, "loss": 5.1677, "step": 989 }, { "epoch": 1.13, "grad_norm": 0.42346412380114135, "learning_rate": 0.0005414719470933042, "loss": 5.172, "step": 990 }, { "epoch": 1.13, "grad_norm": 0.39785046165691595, "learning_rate": 0.0005413400713709841, "loss": 5.075, "step": 991 }, { "epoch": 1.13, "grad_norm": 0.4597919559256249, "learning_rate": 0.0005412080633415796, "loss": 5.0261, "step": 992 }, { "epoch": 1.13, "grad_norm": 0.48832401930097885, "learning_rate": 0.0005410759230774602, "loss": 5.162, "step": 993 }, { "epoch": 1.14, "grad_norm": 0.44151080223688743, "learning_rate": 0.0005409436506510673, "loss": 5.2777, "step": 994 }, { "epoch": 1.14, "grad_norm": 
0.5070336368708293, "learning_rate": 0.0005408112461349154, "loss": 5.0851, "step": 995 }, { "epoch": 1.14, "grad_norm": 0.4072520725175822, "learning_rate": 0.0005406787096015909, "loss": 5.2743, "step": 996 }, { "epoch": 1.14, "grad_norm": 0.37469890432209424, "learning_rate": 0.0005405460411237527, "loss": 5.1918, "step": 997 }, { "epoch": 1.14, "grad_norm": 0.5508067288261687, "learning_rate": 0.0005404132407741324, "loss": 5.1668, "step": 998 }, { "epoch": 1.14, "grad_norm": 0.45907921678099606, "learning_rate": 0.0005402803086255334, "loss": 5.1898, "step": 999 }, { "epoch": 1.14, "grad_norm": 0.43339870809685505, "learning_rate": 0.0005401472447508316, "loss": 5.1038, "step": 1000 }, { "epoch": 1.14, "eval_blimp_filtered_avg": 0.5517910447761194, "eval_blimp_filtered_std": 0.0054038062867259986, "step": 1000 }, { "epoch": 1.14, "eval_blimp_supplement_avg": 0.5021551724137931, "eval_blimp_supplement_std": 0.022915642200552703, "step": 1000 }, { "epoch": 1.14, "eval_vqa_filtered_avg": 0.32, "eval_vqa_filtered_std": 0.046882617226215034, "step": 1000 }, { "epoch": 1.14, "eval_winoground_filtered_avg": 0.62, "eval_winoground_filtered_std": 0.048783173121456316, "step": 1000 }, { "epoch": 1.14, "grad_norm": 0.561893703164224, "learning_rate": 0.0005400140492229751, "loss": 5.098, "step": 1001 }, { "epoch": 1.15, "grad_norm": 1.0296193165838514, "learning_rate": 0.0005398807221149841, "loss": 5.2231, "step": 1002 }, { "epoch": 1.15, "grad_norm": 0.3614293013467432, "learning_rate": 0.0005397472634999511, "loss": 5.3029, "step": 1003 }, { "epoch": 1.15, "grad_norm": 0.3690829892552953, "learning_rate": 0.0005396136734510405, "loss": 5.1894, "step": 1004 }, { "epoch": 1.15, "grad_norm": 0.48725399253717033, "learning_rate": 0.0005394799520414887, "loss": 5.2917, "step": 1005 }, { "epoch": 1.15, "grad_norm": 0.4440608159355949, "learning_rate": 0.0005393460993446044, "loss": 5.099, "step": 1006 }, { "epoch": 1.15, "grad_norm": 0.3287981011951608, "learning_rate": 0.0005392121154337681, "loss": 5.2243, "step": 1007 }, { "epoch": 1.15, "grad_norm": 0.40426056123389564, "learning_rate": 0.0005390780003824322, "loss": 5.1458, "step": 1008 }, { "epoch": 1.15, "grad_norm": 0.43324452593595014, "learning_rate": 0.0005389437542641211, "loss": 5.1089, "step": 1009 }, { "epoch": 1.15, "grad_norm": 0.3934412888466023, "learning_rate": 0.000538809377152431, "loss": 5.249, "step": 1010 }, { "epoch": 1.16, "grad_norm": 0.43515186525022376, "learning_rate": 0.00053867486912103, "loss": 5.189, "step": 1011 }, { "epoch": 1.16, "grad_norm": 0.41632322727536497, "learning_rate": 0.0005385402302436577, "loss": 5.2967, "step": 1012 }, { "epoch": 1.16, "grad_norm": 0.4233719150686152, "learning_rate": 0.0005384054605941257, "loss": 5.0676, "step": 1013 }, { "epoch": 1.16, "grad_norm": 0.4138163754981368, "learning_rate": 0.0005382705602463172, "loss": 5.1771, "step": 1014 }, { "epoch": 1.16, "grad_norm": 0.4344236088728919, "learning_rate": 0.0005381355292741872, "loss": 5.07, "step": 1015 }, { "epoch": 1.16, "grad_norm": 0.3720724280629541, "learning_rate": 0.000538000367751762, "loss": 5.1999, "step": 1016 }, { "epoch": 1.16, "grad_norm": 0.4388187206521687, "learning_rate": 0.0005378650757531398, "loss": 5.1968, "step": 1017 }, { "epoch": 1.16, "grad_norm": 0.42228607254537337, "learning_rate": 0.0005377296533524899, "loss": 5.3091, "step": 1018 }, { "epoch": 1.16, "grad_norm": 0.4223931884826277, "learning_rate": 0.0005375941006240536, "loss": 5.2244, "step": 1019 }, { "epoch": 1.17, "grad_norm": 
0.38467768367623273, "learning_rate": 0.0005374584176421434, "loss": 4.998, "step": 1020 }, { "epoch": 1.17, "grad_norm": 0.5121740364456557, "learning_rate": 0.0005373226044811431, "loss": 5.0163, "step": 1021 }, { "epoch": 1.17, "grad_norm": 0.4071237000591364, "learning_rate": 0.0005371866612155081, "loss": 5.067, "step": 1022 }, { "epoch": 1.17, "grad_norm": 0.40043372174867475, "learning_rate": 0.0005370505879197648, "loss": 5.0916, "step": 1023 }, { "epoch": 1.17, "grad_norm": 0.41700300758287395, "learning_rate": 0.0005369143846685113, "loss": 5.2139, "step": 1024 }, { "epoch": 1.17, "grad_norm": 0.5689300660937899, "learning_rate": 0.0005367780515364169, "loss": 5.2711, "step": 1025 }, { "epoch": 1.17, "grad_norm": 0.4435024728784905, "learning_rate": 0.0005366415885982215, "loss": 5.2629, "step": 1026 }, { "epoch": 1.17, "grad_norm": 0.3848453877625384, "learning_rate": 0.0005365049959287369, "loss": 5.1593, "step": 1027 }, { "epoch": 1.17, "grad_norm": 0.4351079424513521, "learning_rate": 0.0005363682736028456, "loss": 5.1141, "step": 1028 }, { "epoch": 1.18, "grad_norm": 0.4106272769561721, "learning_rate": 0.0005362314216955014, "loss": 5.2094, "step": 1029 }, { "epoch": 1.18, "grad_norm": 0.40084898571029265, "learning_rate": 0.0005360944402817289, "loss": 5.2634, "step": 1030 }, { "epoch": 1.18, "grad_norm": 0.5247165125294693, "learning_rate": 0.0005359573294366241, "loss": 5.2283, "step": 1031 }, { "epoch": 1.18, "grad_norm": 0.4449757409940425, "learning_rate": 0.0005358200892353535, "loss": 5.3638, "step": 1032 }, { "epoch": 1.18, "grad_norm": 0.457256484491978, "learning_rate": 0.0005356827197531546, "loss": 5.3192, "step": 1033 }, { "epoch": 1.18, "grad_norm": 0.39482283802731993, "learning_rate": 0.0005355452210653363, "loss": 5.1406, "step": 1034 }, { "epoch": 1.18, "grad_norm": 0.416256348753842, "learning_rate": 0.0005354075932472775, "loss": 5.2381, "step": 1035 }, { "epoch": 1.18, "grad_norm": 0.4058913011619471, "learning_rate": 0.0005352698363744285, "loss": 5.0604, "step": 1036 }, { "epoch": 1.19, "grad_norm": 0.38800126651414524, "learning_rate": 0.0005351319505223101, "loss": 5.1878, "step": 1037 }, { "epoch": 1.19, "grad_norm": 0.43945411448951976, "learning_rate": 0.0005349939357665139, "loss": 5.108, "step": 1038 }, { "epoch": 1.19, "grad_norm": 0.41254712172825325, "learning_rate": 0.0005348557921827021, "loss": 5.1048, "step": 1039 }, { "epoch": 1.19, "grad_norm": 0.43600766523554596, "learning_rate": 0.0005347175198466077, "loss": 5.2534, "step": 1040 }, { "epoch": 1.19, "grad_norm": 0.5096982108250583, "learning_rate": 0.0005345791188340341, "loss": 5.2208, "step": 1041 }, { "epoch": 1.19, "grad_norm": 0.3678708913414918, "learning_rate": 0.000534440589220855, "loss": 5.2111, "step": 1042 }, { "epoch": 1.19, "grad_norm": 0.39416625832123875, "learning_rate": 0.0005343019310830153, "loss": 4.9403, "step": 1043 }, { "epoch": 1.19, "grad_norm": 0.441812212371377, "learning_rate": 0.0005341631444965298, "loss": 5.0192, "step": 1044 }, { "epoch": 1.19, "grad_norm": 0.4664726864030427, "learning_rate": 0.0005340242295374839, "loss": 5.1739, "step": 1045 }, { "epoch": 1.2, "grad_norm": 0.5130144574780617, "learning_rate": 0.0005338851862820332, "loss": 5.1567, "step": 1046 }, { "epoch": 1.2, "grad_norm": 0.47307547046601034, "learning_rate": 0.0005337460148064039, "loss": 5.3359, "step": 1047 }, { "epoch": 1.2, "grad_norm": 0.4050663421380887, "learning_rate": 0.0005336067151868924, "loss": 5.3024, "step": 1048 }, { "epoch": 1.2, "grad_norm": 
0.4042550322657816, "learning_rate": 0.0005334672874998652, "loss": 5.152, "step": 1049 }, { "epoch": 1.2, "grad_norm": 0.4328215366515683, "learning_rate": 0.0005333277318217592, "loss": 5.1895, "step": 1050 }, { "epoch": 1.2, "grad_norm": 0.3710840385372714, "learning_rate": 0.0005331880482290815, "loss": 5.0731, "step": 1051 }, { "epoch": 1.2, "grad_norm": 0.43238194363062804, "learning_rate": 0.0005330482367984092, "loss": 5.1334, "step": 1052 }, { "epoch": 1.2, "grad_norm": 0.4769103042422726, "learning_rate": 0.0005329082976063893, "loss": 5.3516, "step": 1053 }, { "epoch": 1.2, "grad_norm": 0.559016146728873, "learning_rate": 0.0005327682307297392, "loss": 5.3012, "step": 1054 }, { "epoch": 1.21, "grad_norm": 0.48843851110696007, "learning_rate": 0.0005326280362452463, "loss": 5.0866, "step": 1055 }, { "epoch": 1.21, "grad_norm": 0.40017829206814876, "learning_rate": 0.0005324877142297676, "loss": 5.0901, "step": 1056 }, { "epoch": 1.21, "grad_norm": 0.9415715783550416, "learning_rate": 0.0005323472647602302, "loss": 5.2464, "step": 1057 }, { "epoch": 1.21, "grad_norm": 0.43328766131483776, "learning_rate": 0.0005322066879136312, "loss": 5.1957, "step": 1058 }, { "epoch": 1.21, "grad_norm": 0.7555414193547236, "learning_rate": 0.0005320659837670374, "loss": 5.1849, "step": 1059 }, { "epoch": 1.21, "grad_norm": 0.6087953298003287, "learning_rate": 0.0005319251523975854, "loss": 5.3056, "step": 1060 }, { "epoch": 1.21, "grad_norm": 0.9355953042873021, "learning_rate": 0.0005317841938824815, "loss": 5.2214, "step": 1061 }, { "epoch": 1.21, "grad_norm": 0.6018912899004875, "learning_rate": 0.0005316431082990019, "loss": 5.2228, "step": 1062 }, { "epoch": 1.21, "grad_norm": 0.7489310809591273, "learning_rate": 0.0005315018957244922, "loss": 5.3824, "step": 1063 }, { "epoch": 1.22, "grad_norm": 0.594942303799756, "learning_rate": 0.0005313605562363678, "loss": 5.1788, "step": 1064 }, { "epoch": 1.22, "grad_norm": 0.5581498152103808, "learning_rate": 0.0005312190899121135, "loss": 5.1229, "step": 1065 }, { "epoch": 1.22, "grad_norm": 0.76101456135337, "learning_rate": 0.000531077496829284, "loss": 5.2446, "step": 1066 }, { "epoch": 1.22, "grad_norm": 0.46515444374074355, "learning_rate": 0.000530935777065503, "loss": 5.2714, "step": 1067 }, { "epoch": 1.22, "grad_norm": 0.8552880602797566, "learning_rate": 0.000530793930698464, "loss": 5.0353, "step": 1068 }, { "epoch": 1.22, "grad_norm": 0.4566651456802952, "learning_rate": 0.0005306519578059299, "loss": 5.0689, "step": 1069 }, { "epoch": 1.22, "grad_norm": 1.8450954033538456, "learning_rate": 0.0005305098584657326, "loss": 5.1349, "step": 1070 }, { "epoch": 1.22, "grad_norm": 0.8344903563315349, "learning_rate": 0.0005303676327557738, "loss": 5.2492, "step": 1071 }, { "epoch": 1.23, "grad_norm": 1.0529220822832457, "learning_rate": 0.0005302252807540241, "loss": 5.1619, "step": 1072 }, { "epoch": 1.23, "grad_norm": 0.7847508089507222, "learning_rate": 0.0005300828025385237, "loss": 5.0921, "step": 1073 }, { "epoch": 1.23, "grad_norm": 0.5765721281977121, "learning_rate": 0.0005299401981873817, "loss": 5.0825, "step": 1074 }, { "epoch": 1.23, "grad_norm": 2.8209609810367398, "learning_rate": 0.0005297974677787764, "loss": 5.1608, "step": 1075 }, { "epoch": 1.23, "grad_norm": 0.7479079624234641, "learning_rate": 0.0005296546113909552, "loss": 5.1184, "step": 1076 }, { "epoch": 1.23, "grad_norm": 0.980141376444674, "learning_rate": 0.0005295116291022346, "loss": 5.3642, "step": 1077 }, { "epoch": 1.23, "grad_norm": 1.1246465102398895, 
"learning_rate": 0.0005293685209910002, "loss": 5.179, "step": 1078 }, { "epoch": 1.23, "grad_norm": 0.8278087361805492, "learning_rate": 0.0005292252871357066, "loss": 5.2764, "step": 1079 }, { "epoch": 1.23, "grad_norm": 0.5433776067573339, "learning_rate": 0.0005290819276148772, "loss": 5.1895, "step": 1080 }, { "epoch": 1.24, "grad_norm": 0.776020564990142, "learning_rate": 0.0005289384425071044, "loss": 5.1853, "step": 1081 }, { "epoch": 1.24, "grad_norm": 1.2909933279579868, "learning_rate": 0.000528794831891049, "loss": 5.0572, "step": 1082 }, { "epoch": 1.24, "grad_norm": 1.065713103005686, "learning_rate": 0.0005286510958454414, "loss": 5.2095, "step": 1083 }, { "epoch": 1.24, "grad_norm": 0.42218422264035027, "learning_rate": 0.0005285072344490803, "loss": 4.9899, "step": 1084 }, { "epoch": 1.24, "grad_norm": 0.6041649032325777, "learning_rate": 0.000528363247780833, "loss": 5.1199, "step": 1085 }, { "epoch": 1.24, "grad_norm": 0.7779264492556021, "learning_rate": 0.0005282191359196357, "loss": 5.2199, "step": 1086 }, { "epoch": 1.24, "grad_norm": 0.7844203663727362, "learning_rate": 0.0005280748989444934, "loss": 5.1673, "step": 1087 }, { "epoch": 1.24, "grad_norm": 0.5231115030213644, "learning_rate": 0.0005279305369344792, "loss": 5.05, "step": 1088 }, { "epoch": 1.24, "grad_norm": 0.516594230724654, "learning_rate": 0.0005277860499687351, "loss": 5.2657, "step": 1089 }, { "epoch": 1.25, "grad_norm": 0.5503100793018348, "learning_rate": 0.0005276414381264715, "loss": 5.1392, "step": 1090 }, { "epoch": 1.25, "grad_norm": 0.7698087590065096, "learning_rate": 0.0005274967014869675, "loss": 5.289, "step": 1091 }, { "epoch": 1.25, "grad_norm": 0.8246697311616605, "learning_rate": 0.0005273518401295702, "loss": 5.1584, "step": 1092 }, { "epoch": 1.25, "grad_norm": 0.5362423575948931, "learning_rate": 0.0005272068541336952, "loss": 5.0175, "step": 1093 }, { "epoch": 1.25, "grad_norm": 0.4853677333920353, "learning_rate": 0.0005270617435788268, "loss": 5.2061, "step": 1094 }, { "epoch": 1.25, "grad_norm": 0.6987958342744579, "learning_rate": 0.0005269165085445169, "loss": 5.1275, "step": 1095 }, { "epoch": 1.25, "grad_norm": 0.7670909887700316, "learning_rate": 0.0005267711491103864, "loss": 4.9837, "step": 1096 }, { "epoch": 1.25, "grad_norm": 0.46236244350157996, "learning_rate": 0.0005266256653561237, "loss": 5.1815, "step": 1097 }, { "epoch": 1.25, "grad_norm": 0.7110860064589077, "learning_rate": 0.0005264800573614859, "loss": 5.1434, "step": 1098 }, { "epoch": 1.26, "grad_norm": 0.9209883025218306, "learning_rate": 0.0005263343252062978, "loss": 5.0499, "step": 1099 }, { "epoch": 1.26, "grad_norm": 0.546314212803252, "learning_rate": 0.0005261884689704527, "loss": 5.1397, "step": 1100 }, { "epoch": 1.26, "eval_blimp_filtered_avg": 0.5491044776119403, "eval_blimp_filtered_std": 0.005438878652849802, "step": 1100 }, { "epoch": 1.26, "eval_blimp_supplement_avg": 0.5193965517241379, "eval_blimp_supplement_std": 0.022485938205597543, "step": 1100 }, { "epoch": 1.26, "eval_vqa_filtered_avg": 0.33, "eval_vqa_filtered_std": 0.04725815626252606, "step": 1100 }, { "epoch": 1.26, "eval_winoground_filtered_avg": 0.47, "eval_winoground_filtered_std": 0.05016135580465919, "step": 1100 }, { "epoch": 1.26, "grad_norm": 0.7388797308847014, "learning_rate": 0.0005260424887339114, "loss": 5.1166, "step": 1101 }, { "epoch": 1.26, "grad_norm": 0.551647769812749, "learning_rate": 0.0005258963845767034, "loss": 5.1336, "step": 1102 }, { "epoch": 1.26, "grad_norm": 0.7669349059020705, "learning_rate": 
0.0005257501565789252, "loss": 4.9917, "step": 1103 }, { "epoch": 1.26, "grad_norm": 1.054321445801503, "learning_rate": 0.000525603804820742, "loss": 4.9136, "step": 1104 }, { "epoch": 1.26, "grad_norm": 0.7449588210770962, "learning_rate": 0.0005254573293823864, "loss": 4.9924, "step": 1105 }, { "epoch": 1.26, "grad_norm": 0.798373357755002, "learning_rate": 0.000525310730344159, "loss": 5.0163, "step": 1106 }, { "epoch": 1.27, "grad_norm": 0.446993844530049, "learning_rate": 0.0005251640077864281, "loss": 5.0281, "step": 1107 }, { "epoch": 1.27, "grad_norm": 0.8892870322040823, "learning_rate": 0.0005250171617896297, "loss": 4.9492, "step": 1108 }, { "epoch": 1.27, "grad_norm": 0.39901731637201354, "learning_rate": 0.0005248701924342675, "loss": 4.9201, "step": 1109 }, { "epoch": 1.27, "grad_norm": 0.45972349421204234, "learning_rate": 0.0005247230998009127, "loss": 5.1685, "step": 1110 }, { "epoch": 1.27, "grad_norm": 0.9634401138068055, "learning_rate": 0.0005245758839702044, "loss": 5.0518, "step": 1111 }, { "epoch": 1.27, "grad_norm": 0.440466459676811, "learning_rate": 0.000524428545022849, "loss": 5.0364, "step": 1112 }, { "epoch": 1.27, "grad_norm": 1.7693820096440174, "learning_rate": 0.0005242810830396202, "loss": 5.1071, "step": 1113 }, { "epoch": 1.27, "grad_norm": 1.1038071847876403, "learning_rate": 0.0005241334981013597, "loss": 5.0515, "step": 1114 }, { "epoch": 1.27, "grad_norm": 1.1143026568331607, "learning_rate": 0.0005239857902889761, "loss": 4.9906, "step": 1115 }, { "epoch": 1.28, "grad_norm": 0.563283524456605, "learning_rate": 0.0005238379596834455, "loss": 5.0492, "step": 1116 }, { "epoch": 1.28, "grad_norm": 1.6583808078563582, "learning_rate": 0.0005236900063658117, "loss": 5.237, "step": 1117 }, { "epoch": 1.28, "grad_norm": 0.8356491269305153, "learning_rate": 0.0005235419304171849, "loss": 5.1512, "step": 1118 }, { "epoch": 1.28, "grad_norm": 1.1297988268249588, "learning_rate": 0.0005233937319187435, "loss": 5.0435, "step": 1119 }, { "epoch": 1.28, "grad_norm": 0.6528510026862676, "learning_rate": 0.0005232454109517326, "loss": 5.0446, "step": 1120 }, { "epoch": 1.28, "grad_norm": 1.4819883250572987, "learning_rate": 0.0005230969675974644, "loss": 4.9617, "step": 1121 }, { "epoch": 1.28, "grad_norm": 0.5205162404324648, "learning_rate": 0.0005229484019373182, "loss": 5.0766, "step": 1122 }, { "epoch": 1.28, "grad_norm": 0.9835362356962077, "learning_rate": 0.0005227997140527408, "loss": 4.9259, "step": 1123 }, { "epoch": 1.28, "grad_norm": 0.8178215553146111, "learning_rate": 0.0005226509040252451, "loss": 4.9575, "step": 1124 }, { "epoch": 1.29, "grad_norm": 1.0049795012814347, "learning_rate": 0.0005225019719364119, "loss": 5.0462, "step": 1125 }, { "epoch": 1.29, "grad_norm": 0.7621761099355314, "learning_rate": 0.0005223529178678884, "loss": 4.9663, "step": 1126 }, { "epoch": 1.29, "grad_norm": 0.8674700161516951, "learning_rate": 0.0005222037419013889, "loss": 4.9741, "step": 1127 }, { "epoch": 1.29, "grad_norm": 0.8917539173895458, "learning_rate": 0.0005220544441186942, "loss": 4.921, "step": 1128 }, { "epoch": 1.29, "grad_norm": 0.7421166579577743, "learning_rate": 0.0005219050246016523, "loss": 5.0439, "step": 1129 }, { "epoch": 1.29, "grad_norm": 0.8352406913970326, "learning_rate": 0.0005217554834321777, "loss": 4.9815, "step": 1130 }, { "epoch": 1.29, "grad_norm": 0.510072262676206, "learning_rate": 0.0005216058206922515, "loss": 5.0424, "step": 1131 }, { "epoch": 1.29, "grad_norm": 0.8521222428339478, "learning_rate": 0.0005214560364639218, 
"loss": 4.9688, "step": 1132 }, { "epoch": 1.29, "grad_norm": 0.4695137031523553, "learning_rate": 0.000521306130829303, "loss": 5.0004, "step": 1133 }, { "epoch": 1.3, "grad_norm": 0.9496825122600068, "learning_rate": 0.0005211561038705759, "loss": 5.0936, "step": 1134 }, { "epoch": 1.3, "grad_norm": 0.6639662429669891, "learning_rate": 0.0005210059556699884, "loss": 4.9557, "step": 1135 }, { "epoch": 1.3, "grad_norm": 0.6514236282093878, "learning_rate": 0.0005208556863098544, "loss": 4.7878, "step": 1136 }, { "epoch": 1.3, "grad_norm": 1.0286651488045517, "learning_rate": 0.0005207052958725542, "loss": 5.2284, "step": 1137 }, { "epoch": 1.3, "grad_norm": 0.39673992022551086, "learning_rate": 0.0005205547844405348, "loss": 5.1993, "step": 1138 }, { "epoch": 1.3, "grad_norm": 1.0415379274760892, "learning_rate": 0.0005204041520963092, "loss": 4.9702, "step": 1139 }, { "epoch": 1.3, "grad_norm": 0.4958305644905204, "learning_rate": 0.0005202533989224568, "loss": 5.2672, "step": 1140 }, { "epoch": 1.3, "grad_norm": 0.4776426094183519, "learning_rate": 0.0005201025250016236, "loss": 4.9494, "step": 1141 }, { "epoch": 1.31, "grad_norm": 0.6409779908552264, "learning_rate": 0.0005199515304165211, "loss": 4.9546, "step": 1142 }, { "epoch": 1.31, "grad_norm": 0.5864650967387575, "learning_rate": 0.0005198004152499275, "loss": 5.0828, "step": 1143 }, { "epoch": 1.31, "grad_norm": 0.8344509195774621, "learning_rate": 0.0005196491795846868, "loss": 4.8471, "step": 1144 }, { "epoch": 1.31, "grad_norm": 1.1317193855471057, "learning_rate": 0.0005194978235037094, "loss": 5.0192, "step": 1145 }, { "epoch": 1.31, "grad_norm": 0.45619035158450266, "learning_rate": 0.0005193463470899713, "loss": 4.9606, "step": 1146 }, { "epoch": 1.31, "grad_norm": 0.8584507064489265, "learning_rate": 0.0005191947504265147, "loss": 5.1415, "step": 1147 }, { "epoch": 1.31, "grad_norm": 0.4652728499512511, "learning_rate": 0.0005190430335964478, "loss": 4.9464, "step": 1148 }, { "epoch": 1.31, "grad_norm": 0.9712252813627654, "learning_rate": 0.0005188911966829445, "loss": 4.857, "step": 1149 }, { "epoch": 1.31, "grad_norm": 0.6930626494864487, "learning_rate": 0.0005187392397692447, "loss": 4.8826, "step": 1150 }, { "epoch": 1.32, "grad_norm": 0.573206615219214, "learning_rate": 0.0005185871629386537, "loss": 4.9485, "step": 1151 }, { "epoch": 1.32, "grad_norm": 0.6511105787127845, "learning_rate": 0.000518434966274543, "loss": 5.0681, "step": 1152 }, { "epoch": 1.32, "grad_norm": 0.5545478258381884, "learning_rate": 0.0005182826498603497, "loss": 5.0605, "step": 1153 }, { "epoch": 1.32, "grad_norm": 0.8252004102443666, "learning_rate": 0.0005181302137795764, "loss": 4.8447, "step": 1154 }, { "epoch": 1.32, "grad_norm": 0.6473725755168579, "learning_rate": 0.0005179776581157914, "loss": 5.0475, "step": 1155 }, { "epoch": 1.32, "grad_norm": 0.48383575322397915, "learning_rate": 0.0005178249829526286, "loss": 5.0081, "step": 1156 }, { "epoch": 1.32, "grad_norm": 0.6016024293557599, "learning_rate": 0.0005176721883737871, "loss": 4.7762, "step": 1157 }, { "epoch": 1.32, "grad_norm": 0.6948490952471152, "learning_rate": 0.000517519274463032, "loss": 4.932, "step": 1158 }, { "epoch": 1.32, "grad_norm": 0.7184347826556939, "learning_rate": 0.0005173662413041932, "loss": 5.0516, "step": 1159 }, { "epoch": 1.33, "grad_norm": 0.6232110017312381, "learning_rate": 0.0005172130889811667, "loss": 4.8772, "step": 1160 }, { "epoch": 1.33, "grad_norm": 1.13915488563697, "learning_rate": 0.0005170598175779131, "loss": 5.0593, "step": 1161 
}, { "epoch": 1.33, "grad_norm": 0.5733625966435614, "learning_rate": 0.0005169064271784589, "loss": 4.8745, "step": 1162 }, { "epoch": 1.33, "grad_norm": 1.2201018034431876, "learning_rate": 0.0005167529178668952, "loss": 4.9017, "step": 1163 }, { "epoch": 1.33, "grad_norm": 0.7347307830627611, "learning_rate": 0.000516599289727379, "loss": 5.1694, "step": 1164 }, { "epoch": 1.33, "grad_norm": 0.6475265633360185, "learning_rate": 0.0005164455428441318, "loss": 4.9257, "step": 1165 }, { "epoch": 1.33, "grad_norm": 0.5842273018428894, "learning_rate": 0.0005162916773014406, "loss": 4.8271, "step": 1166 }, { "epoch": 1.33, "grad_norm": 0.6407435764808377, "learning_rate": 0.0005161376931836573, "loss": 5.0074, "step": 1167 }, { "epoch": 1.33, "grad_norm": 0.8854846808554024, "learning_rate": 0.000515983590575199, "loss": 4.8804, "step": 1168 }, { "epoch": 1.34, "grad_norm": 0.5298703149937839, "learning_rate": 0.0005158293695605474, "loss": 5.0125, "step": 1169 }, { "epoch": 1.34, "grad_norm": 1.0948542180653988, "learning_rate": 0.0005156750302242495, "loss": 4.908, "step": 1170 }, { "epoch": 1.34, "grad_norm": 0.5636888837917247, "learning_rate": 0.0005155205726509167, "loss": 4.8616, "step": 1171 }, { "epoch": 1.34, "grad_norm": 2.1187701807239594, "learning_rate": 0.0005153659969252257, "loss": 5.1501, "step": 1172 }, { "epoch": 1.34, "grad_norm": 1.7518600969040812, "learning_rate": 0.0005152113031319176, "loss": 4.9481, "step": 1173 }, { "epoch": 1.34, "grad_norm": 1.3365198206110314, "learning_rate": 0.0005150564913557987, "loss": 4.9139, "step": 1174 }, { "epoch": 1.34, "grad_norm": 2.315331396497301, "learning_rate": 0.0005149015616817392, "loss": 4.9212, "step": 1175 }, { "epoch": 1.34, "grad_norm": 0.5954872487348631, "learning_rate": 0.000514746514194675, "loss": 4.9505, "step": 1176 }, { "epoch": 1.35, "grad_norm": 1.0918409782012397, "learning_rate": 0.0005145913489796054, "loss": 4.9391, "step": 1177 }, { "epoch": 1.35, "grad_norm": 0.41015411682616726, "learning_rate": 0.0005144360661215953, "loss": 4.9366, "step": 1178 }, { "epoch": 1.35, "grad_norm": 1.0424835235782717, "learning_rate": 0.0005142806657057735, "loss": 4.9783, "step": 1179 }, { "epoch": 1.35, "grad_norm": 0.4191377513358202, "learning_rate": 0.0005141251478173332, "loss": 4.7821, "step": 1180 }, { "epoch": 1.35, "grad_norm": 0.758495752798407, "learning_rate": 0.0005139695125415323, "loss": 4.7929, "step": 1181 }, { "epoch": 1.35, "grad_norm": 0.4682771067503924, "learning_rate": 0.000513813759963693, "loss": 4.981, "step": 1182 }, { "epoch": 1.35, "grad_norm": 0.5402319032702916, "learning_rate": 0.0005136578901692016, "loss": 4.9498, "step": 1183 }, { "epoch": 1.35, "grad_norm": 0.5472569612557033, "learning_rate": 0.000513501903243509, "loss": 4.7101, "step": 1184 }, { "epoch": 1.35, "grad_norm": 0.5823065367942866, "learning_rate": 0.0005133457992721299, "loss": 5.0122, "step": 1185 }, { "epoch": 1.36, "grad_norm": 0.5158509450320617, "learning_rate": 0.0005131895783406435, "loss": 4.9891, "step": 1186 }, { "epoch": 1.36, "grad_norm": 0.5200517975373709, "learning_rate": 0.000513033240534693, "loss": 4.8529, "step": 1187 }, { "epoch": 1.36, "grad_norm": 0.5123793020328804, "learning_rate": 0.0005128767859399857, "loss": 4.9624, "step": 1188 }, { "epoch": 1.36, "grad_norm": 0.40935990902343433, "learning_rate": 0.0005127202146422927, "loss": 4.8945, "step": 1189 }, { "epoch": 1.36, "grad_norm": 0.658003655704528, "learning_rate": 0.0005125635267274496, "loss": 4.8389, "step": 1190 }, { "epoch": 1.36, 
"grad_norm": 0.5595005191066111, "learning_rate": 0.0005124067222813552, "loss": 4.9355, "step": 1191 }, { "epoch": 1.36, "grad_norm": 0.47186210492967656, "learning_rate": 0.000512249801389973, "loss": 4.7485, "step": 1192 }, { "epoch": 1.36, "grad_norm": 0.4428419171738912, "learning_rate": 0.0005120927641393296, "loss": 4.7719, "step": 1193 }, { "epoch": 1.36, "grad_norm": 0.4327517902953713, "learning_rate": 0.0005119356106155161, "loss": 4.9128, "step": 1194 }, { "epoch": 1.37, "grad_norm": 0.5198701019397242, "learning_rate": 0.0005117783409046866, "loss": 5.0118, "step": 1195 }, { "epoch": 1.37, "grad_norm": 0.5400972155378008, "learning_rate": 0.0005116209550930593, "loss": 5.0221, "step": 1196 }, { "epoch": 1.37, "grad_norm": 0.6193886973040498, "learning_rate": 0.0005114634532669163, "loss": 4.992, "step": 1197 }, { "epoch": 1.37, "grad_norm": 0.45108723732597616, "learning_rate": 0.0005113058355126027, "loss": 4.9304, "step": 1198 }, { "epoch": 1.37, "grad_norm": 0.9877624528823103, "learning_rate": 0.0005111481019165275, "loss": 4.8404, "step": 1199 }, { "epoch": 1.37, "grad_norm": 0.41548693451522434, "learning_rate": 0.0005109902525651635, "loss": 4.888, "step": 1200 }, { "epoch": 1.37, "eval_blimp_filtered_avg": 0.5583582089552239, "eval_blimp_filtered_std": 0.00544020239367129, "step": 1200 }, { "epoch": 1.37, "eval_blimp_supplement_avg": 0.5172413793103449, "eval_blimp_supplement_std": 0.022523037230137665, "step": 1200 }, { "epoch": 1.37, "eval_vqa_filtered_avg": 0.27, "eval_vqa_filtered_std": 0.04461960433384741, "step": 1200 }, { "epoch": 1.37, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956913, "step": 1200 }, { "epoch": 1.37, "grad_norm": 0.9957865258578577, "learning_rate": 0.0005108322875450462, "loss": 4.8594, "step": 1201 }, { "epoch": 1.37, "grad_norm": 0.6637565188705488, "learning_rate": 0.0005106742069427752, "loss": 5.0869, "step": 1202 }, { "epoch": 1.37, "grad_norm": 0.9651303579560007, "learning_rate": 0.0005105160108450131, "loss": 4.925, "step": 1203 }, { "epoch": 1.38, "grad_norm": 0.7578337233488569, "learning_rate": 0.0005103576993384859, "loss": 5.083, "step": 1204 }, { "epoch": 1.38, "grad_norm": 0.6424627149000028, "learning_rate": 0.0005101992725099829, "loss": 5.014, "step": 1205 }, { "epoch": 1.38, "grad_norm": 0.7018920473198439, "learning_rate": 0.0005100407304463568, "loss": 4.9008, "step": 1206 }, { "epoch": 1.38, "grad_norm": 0.6901004307853441, "learning_rate": 0.0005098820732345232, "loss": 4.8828, "step": 1207 }, { "epoch": 1.38, "grad_norm": 1.339425812065133, "learning_rate": 0.0005097233009614606, "loss": 4.799, "step": 1208 }, { "epoch": 1.38, "grad_norm": 0.8226150980471355, "learning_rate": 0.0005095644137142113, "loss": 4.9402, "step": 1209 }, { "epoch": 1.38, "grad_norm": 0.614965173080206, "learning_rate": 0.0005094054115798802, "loss": 4.8452, "step": 1210 }, { "epoch": 1.38, "grad_norm": 0.8738358304928479, "learning_rate": 0.0005092462946456348, "loss": 4.9363, "step": 1211 }, { "epoch": 1.39, "grad_norm": 0.5182861711293806, "learning_rate": 0.0005090870629987064, "loss": 4.795, "step": 1212 }, { "epoch": 1.39, "grad_norm": 0.9607883807950071, "learning_rate": 0.0005089277167263886, "loss": 4.8792, "step": 1213 }, { "epoch": 1.39, "grad_norm": 0.4876402318685862, "learning_rate": 0.0005087682559160378, "loss": 4.7971, "step": 1214 }, { "epoch": 1.39, "grad_norm": 1.0754644739650243, "learning_rate": 0.0005086086806550734, "loss": 4.8393, "step": 1215 }, { "epoch": 1.39, "grad_norm": 
0.5238109834341227, "learning_rate": 0.0005084489910309778, "loss": 4.9783, "step": 1216 }, { "epoch": 1.39, "grad_norm": 1.0266540298506723, "learning_rate": 0.0005082891871312955, "loss": 4.8608, "step": 1217 }, { "epoch": 1.39, "grad_norm": 0.7944673287759421, "learning_rate": 0.0005081292690436339, "loss": 4.9533, "step": 1218 }, { "epoch": 1.39, "grad_norm": 0.5485454490627911, "learning_rate": 0.0005079692368556634, "loss": 4.6756, "step": 1219 }, { "epoch": 1.39, "grad_norm": 0.9326840098046364, "learning_rate": 0.0005078090906551162, "loss": 4.72, "step": 1220 }, { "epoch": 1.4, "grad_norm": 0.47197634196124005, "learning_rate": 0.0005076488305297877, "loss": 4.7063, "step": 1221 }, { "epoch": 1.4, "grad_norm": 1.4749222626088472, "learning_rate": 0.0005074884565675354, "loss": 4.9344, "step": 1222 }, { "epoch": 1.4, "grad_norm": 1.431361413407069, "learning_rate": 0.0005073279688562792, "loss": 5.0848, "step": 1223 }, { "epoch": 1.4, "grad_norm": 0.7334212657567057, "learning_rate": 0.0005071673674840013, "loss": 4.948, "step": 1224 }, { "epoch": 1.4, "grad_norm": 1.950154902108328, "learning_rate": 0.0005070066525387466, "loss": 4.955, "step": 1225 }, { "epoch": 1.4, "grad_norm": 0.7944280004669937, "learning_rate": 0.0005068458241086219, "loss": 4.9727, "step": 1226 }, { "epoch": 1.4, "grad_norm": 1.2139408258615942, "learning_rate": 0.0005066848822817963, "loss": 5.0231, "step": 1227 }, { "epoch": 1.4, "grad_norm": 0.5944887883002979, "learning_rate": 0.0005065238271465011, "loss": 4.7285, "step": 1228 }, { "epoch": 1.4, "grad_norm": 1.6876835801730448, "learning_rate": 0.0005063626587910297, "loss": 4.9803, "step": 1229 }, { "epoch": 1.41, "grad_norm": 1.0795092472889753, "learning_rate": 0.0005062013773037374, "loss": 4.6979, "step": 1230 }, { "epoch": 1.41, "grad_norm": 0.8395141578376858, "learning_rate": 0.0005060399827730419, "loss": 5.0304, "step": 1231 }, { "epoch": 1.41, "grad_norm": 0.47915198001552256, "learning_rate": 0.0005058784752874227, "loss": 4.9636, "step": 1232 }, { "epoch": 1.41, "grad_norm": 1.7573407152077023, "learning_rate": 0.000505716854935421, "loss": 5.1032, "step": 1233 }, { "epoch": 1.41, "grad_norm": 1.0911197864536943, "learning_rate": 0.0005055551218056402, "loss": 4.8738, "step": 1234 }, { "epoch": 1.41, "grad_norm": 1.2156853393203244, "learning_rate": 0.0005053932759867452, "loss": 4.8411, "step": 1235 }, { "epoch": 1.41, "grad_norm": 0.8730167999221258, "learning_rate": 0.0005052313175674632, "loss": 4.8779, "step": 1236 }, { "epoch": 1.41, "grad_norm": 1.1565281591282393, "learning_rate": 0.0005050692466365826, "loss": 4.9963, "step": 1237 }, { "epoch": 1.41, "grad_norm": 0.9156291269397416, "learning_rate": 0.0005049070632829536, "loss": 4.8102, "step": 1238 }, { "epoch": 1.42, "grad_norm": 1.1502784426219956, "learning_rate": 0.0005047447675954882, "loss": 4.8999, "step": 1239 }, { "epoch": 1.42, "grad_norm": 0.7639829592142456, "learning_rate": 0.00050458235966316, "loss": 4.7835, "step": 1240 }, { "epoch": 1.42, "grad_norm": 1.6237974884676651, "learning_rate": 0.0005044198395750037, "loss": 4.956, "step": 1241 }, { "epoch": 1.42, "grad_norm": 1.4339224680947942, "learning_rate": 0.0005042572074201163, "loss": 4.7663, "step": 1242 }, { "epoch": 1.42, "grad_norm": 0.9249375634005634, "learning_rate": 0.0005040944632876552, "loss": 4.8611, "step": 1243 }, { "epoch": 1.42, "grad_norm": 1.128083878891834, "learning_rate": 0.0005039316072668402, "loss": 4.8098, "step": 1244 }, { "epoch": 1.42, "grad_norm": 1.0841392883389345, 
"learning_rate": 0.0005037686394469517, "loss": 4.9768, "step": 1245 }, { "epoch": 1.42, "grad_norm": 1.0073590112013175, "learning_rate": 0.0005036055599173318, "loss": 4.8392, "step": 1246 }, { "epoch": 1.43, "grad_norm": 0.9717709390098217, "learning_rate": 0.0005034423687673836, "loss": 4.8141, "step": 1247 }, { "epoch": 1.43, "grad_norm": 0.7780132618526815, "learning_rate": 0.0005032790660865718, "loss": 4.8616, "step": 1248 }, { "epoch": 1.43, "grad_norm": 0.8315162825271477, "learning_rate": 0.0005031156519644214, "loss": 4.6124, "step": 1249 }, { "epoch": 1.43, "grad_norm": 0.4548498826413443, "learning_rate": 0.0005029521264905196, "loss": 4.9729, "step": 1250 }, { "epoch": 1.43, "grad_norm": 0.7893000678422016, "learning_rate": 0.0005027884897545139, "loss": 4.7712, "step": 1251 }, { "epoch": 1.43, "grad_norm": 0.4082461493930724, "learning_rate": 0.0005026247418461128, "loss": 4.855, "step": 1252 }, { "epoch": 1.43, "grad_norm": 0.7963173493176894, "learning_rate": 0.0005024608828550861, "loss": 4.8843, "step": 1253 }, { "epoch": 1.43, "grad_norm": 0.4481474948040786, "learning_rate": 0.0005022969128712641, "loss": 4.7978, "step": 1254 }, { "epoch": 1.43, "grad_norm": 0.49120812105606754, "learning_rate": 0.0005021328319845385, "loss": 4.7432, "step": 1255 }, { "epoch": 1.44, "grad_norm": 0.5851131275106991, "learning_rate": 0.0005019686402848614, "loss": 4.8114, "step": 1256 }, { "epoch": 1.44, "grad_norm": 0.63304651258489, "learning_rate": 0.0005018043378622456, "loss": 4.8111, "step": 1257 }, { "epoch": 1.44, "grad_norm": 0.6314754781660858, "learning_rate": 0.0005016399248067647, "loss": 4.7954, "step": 1258 }, { "epoch": 1.44, "grad_norm": 0.5366770227908617, "learning_rate": 0.0005014754012085531, "loss": 4.9664, "step": 1259 }, { "epoch": 1.44, "grad_norm": 1.0991363018408031, "learning_rate": 0.0005013107671578056, "loss": 4.7331, "step": 1260 }, { "epoch": 1.44, "grad_norm": 0.7351901258274723, "learning_rate": 0.0005011460227447776, "loss": 4.6044, "step": 1261 }, { "epoch": 1.44, "grad_norm": 1.4754451286872485, "learning_rate": 0.0005009811680597852, "loss": 4.8287, "step": 1262 }, { "epoch": 1.44, "grad_norm": 1.2090020124165997, "learning_rate": 0.0005008162031932046, "loss": 4.6633, "step": 1263 }, { "epoch": 1.44, "grad_norm": 1.367965450960771, "learning_rate": 0.0005006511282354727, "loss": 4.8497, "step": 1264 }, { "epoch": 1.45, "grad_norm": 1.4029800100559322, "learning_rate": 0.0005004859432770865, "loss": 4.6876, "step": 1265 }, { "epoch": 1.45, "grad_norm": 1.061469789627048, "learning_rate": 0.0005003206484086035, "loss": 4.7985, "step": 1266 }, { "epoch": 1.45, "grad_norm": 1.5892286372984972, "learning_rate": 0.0005001552437206415, "loss": 4.7692, "step": 1267 }, { "epoch": 1.45, "grad_norm": 0.5439740856881324, "learning_rate": 0.0004999897293038783, "loss": 4.6833, "step": 1268 }, { "epoch": 1.45, "grad_norm": 1.492458645673748, "learning_rate": 0.0004998241052490519, "loss": 4.887, "step": 1269 }, { "epoch": 1.45, "grad_norm": 0.45413512260815747, "learning_rate": 0.0004996583716469608, "loss": 4.6017, "step": 1270 }, { "epoch": 1.45, "grad_norm": 0.8283882911797816, "learning_rate": 0.0004994925285884627, "loss": 4.6879, "step": 1271 }, { "epoch": 1.45, "grad_norm": 0.639335114117073, "learning_rate": 0.0004993265761644764, "loss": 4.6942, "step": 1272 }, { "epoch": 1.45, "grad_norm": 1.5206270200592327, "learning_rate": 0.0004991605144659797, "loss": 4.8212, "step": 1273 }, { "epoch": 1.46, "grad_norm": 0.6901427967060859, "learning_rate": 
0.0004989943435840107, "loss": 4.6872, "step": 1274 }, { "epoch": 1.46, "grad_norm": 0.6603345508535575, "learning_rate": 0.0004988280636096675, "loss": 4.6591, "step": 1275 }, { "epoch": 1.46, "grad_norm": 0.5730093528645751, "learning_rate": 0.0004986616746341078, "loss": 4.6447, "step": 1276 }, { "epoch": 1.46, "grad_norm": 0.42362682469383894, "learning_rate": 0.000498495176748549, "loss": 4.6906, "step": 1277 }, { "epoch": 1.46, "grad_norm": 0.48937732940455675, "learning_rate": 0.0004983285700442684, "loss": 4.7235, "step": 1278 }, { "epoch": 1.46, "grad_norm": 0.6884060486082092, "learning_rate": 0.000498161854612603, "loss": 4.7805, "step": 1279 }, { "epoch": 1.46, "grad_norm": 0.5310802121161438, "learning_rate": 0.000497995030544949, "loss": 4.741, "step": 1280 }, { "epoch": 1.46, "grad_norm": 1.1227830939190784, "learning_rate": 0.0004978280979327628, "loss": 4.7948, "step": 1281 }, { "epoch": 1.47, "grad_norm": 0.6429077618547205, "learning_rate": 0.0004976610568675596, "loss": 4.7321, "step": 1282 }, { "epoch": 1.47, "grad_norm": 1.3059947716068132, "learning_rate": 0.0004974939074409145, "loss": 4.7174, "step": 1283 }, { "epoch": 1.47, "grad_norm": 1.801050480124198, "learning_rate": 0.0004973266497444621, "loss": 4.6705, "step": 1284 }, { "epoch": 1.47, "grad_norm": 0.8958140741169052, "learning_rate": 0.0004971592838698959, "loss": 4.7151, "step": 1285 }, { "epoch": 1.47, "grad_norm": 0.7629280237440986, "learning_rate": 0.000496991809908969, "loss": 4.5902, "step": 1286 }, { "epoch": 1.47, "grad_norm": 0.6556233287369142, "learning_rate": 0.0004968242279534938, "loss": 4.9567, "step": 1287 }, { "epoch": 1.47, "grad_norm": 0.6905479871514387, "learning_rate": 0.0004966565380953419, "loss": 4.7847, "step": 1288 }, { "epoch": 1.47, "grad_norm": 1.070199058361432, "learning_rate": 0.0004964887404264438, "loss": 4.6834, "step": 1289 }, { "epoch": 1.47, "grad_norm": 0.6174604576519214, "learning_rate": 0.0004963208350387893, "loss": 4.8873, "step": 1290 }, { "epoch": 1.48, "grad_norm": 1.1293149455453142, "learning_rate": 0.0004961528220244273, "loss": 4.5958, "step": 1291 }, { "epoch": 1.48, "grad_norm": 1.4572752181086612, "learning_rate": 0.0004959847014754657, "loss": 4.6508, "step": 1292 }, { "epoch": 1.48, "grad_norm": 0.45409888145265287, "learning_rate": 0.0004958164734840712, "loss": 4.7024, "step": 1293 }, { "epoch": 1.48, "grad_norm": 1.4156967579984714, "learning_rate": 0.0004956481381424695, "loss": 4.6617, "step": 1294 }, { "epoch": 1.48, "grad_norm": 0.8419720982766232, "learning_rate": 0.0004954796955429451, "loss": 4.7783, "step": 1295 }, { "epoch": 1.48, "grad_norm": 1.3810440242487838, "learning_rate": 0.0004953111457778415, "loss": 4.8309, "step": 1296 }, { "epoch": 1.48, "grad_norm": 1.221250539264744, "learning_rate": 0.0004951424889395607, "loss": 4.6704, "step": 1297 }, { "epoch": 1.48, "grad_norm": 1.5138682629424352, "learning_rate": 0.0004949737251205633, "loss": 4.83, "step": 1298 }, { "epoch": 1.48, "grad_norm": 2.204955815774822, "learning_rate": 0.0004948048544133691, "loss": 4.7474, "step": 1299 }, { "epoch": 1.49, "grad_norm": 0.4067880260243774, "learning_rate": 0.0004946358769105559, "loss": 4.7254, "step": 1300 }, { "epoch": 1.49, "eval_blimp_filtered_avg": 0.5467164179104478, "eval_blimp_filtered_std": 0.005442104706995175, "step": 1300 }, { "epoch": 1.49, "eval_blimp_supplement_avg": 0.540948275862069, "eval_blimp_supplement_std": 0.022892130922599736, "step": 1300 }, { "epoch": 1.49, "eval_vqa_filtered_avg": 0.15, "eval_vqa_filtered_std": 
0.035887028128263734, "step": 1300 }, { "epoch": 1.49, "eval_winoground_filtered_avg": 0.52, "eval_winoground_filtered_std": 0.05021167315686779, "step": 1300 }, { "epoch": 1.49, "grad_norm": 1.3858859649800603, "learning_rate": 0.0004944667927047604, "loss": 4.7761, "step": 1301 }, { "epoch": 1.49, "grad_norm": 0.5719051975111162, "learning_rate": 0.0004942976018886776, "loss": 4.7397, "step": 1302 }, { "epoch": 1.49, "grad_norm": 1.7301528252563851, "learning_rate": 0.000494128304555061, "loss": 4.6252, "step": 1303 }, { "epoch": 1.49, "grad_norm": 0.5916035548349112, "learning_rate": 0.0004939589007967228, "loss": 4.6803, "step": 1304 }, { "epoch": 1.49, "grad_norm": 1.4469993774576138, "learning_rate": 0.0004937893907065329, "loss": 4.7683, "step": 1305 }, { "epoch": 1.49, "grad_norm": 0.6450585381532522, "learning_rate": 0.00049361977437742, "loss": 4.9302, "step": 1306 }, { "epoch": 1.49, "grad_norm": 2.378127853025137, "learning_rate": 0.000493450051902371, "loss": 4.9909, "step": 1307 }, { "epoch": 1.49, "grad_norm": 0.5976587289671206, "learning_rate": 0.0004932802233744308, "loss": 4.6942, "step": 1308 }, { "epoch": 1.5, "grad_norm": 2.447640477599913, "learning_rate": 0.0004931102888867026, "loss": 4.803, "step": 1309 }, { "epoch": 1.5, "grad_norm": 1.1908911280457426, "learning_rate": 0.0004929402485323476, "loss": 4.9085, "step": 1310 }, { "epoch": 1.5, "grad_norm": 1.3934998226557072, "learning_rate": 0.0004927701024045849, "loss": 4.8782, "step": 1311 }, { "epoch": 1.5, "grad_norm": 0.5446495349752738, "learning_rate": 0.0004925998505966921, "loss": 4.6614, "step": 1312 }, { "epoch": 1.5, "grad_norm": 1.3846799533531913, "learning_rate": 0.0004924294932020039, "loss": 4.8579, "step": 1313 }, { "epoch": 1.5, "grad_norm": 0.4293905552806972, "learning_rate": 0.0004922590303139139, "loss": 4.8049, "step": 1314 }, { "epoch": 1.5, "grad_norm": 1.304156679243488, "learning_rate": 0.0004920884620258726, "loss": 4.5957, "step": 1315 }, { "epoch": 1.5, "grad_norm": 0.5510272422892712, "learning_rate": 0.0004919177884313891, "loss": 4.875, "step": 1316 }, { "epoch": 1.51, "grad_norm": 1.367319332203774, "learning_rate": 0.0004917470096240293, "loss": 4.8574, "step": 1317 }, { "epoch": 1.51, "grad_norm": 0.47503688442215297, "learning_rate": 0.0004915761256974177, "loss": 4.7461, "step": 1318 }, { "epoch": 1.51, "grad_norm": 1.1431459999872369, "learning_rate": 0.0004914051367452358, "loss": 4.8458, "step": 1319 }, { "epoch": 1.51, "grad_norm": 0.437267377825884, "learning_rate": 0.000491234042861223, "loss": 4.725, "step": 1320 }, { "epoch": 1.51, "grad_norm": 0.8747912964496404, "learning_rate": 0.0004910628441391761, "loss": 4.7648, "step": 1321 }, { "epoch": 1.51, "grad_norm": 0.8521175772691112, "learning_rate": 0.0004908915406729494, "loss": 4.6996, "step": 1322 }, { "epoch": 1.51, "grad_norm": 1.053915445675989, "learning_rate": 0.0004907201325564547, "loss": 4.6883, "step": 1323 }, { "epoch": 1.51, "grad_norm": 0.6778016050191935, "learning_rate": 0.0004905486198836609, "loss": 4.751, "step": 1324 }, { "epoch": 1.51, "grad_norm": 0.6869392776255355, "learning_rate": 0.0004903770027485947, "loss": 4.8649, "step": 1325 }, { "epoch": 1.52, "grad_norm": 0.74329793020045, "learning_rate": 0.0004902052812453395, "loss": 4.8407, "step": 1326 }, { "epoch": 1.52, "grad_norm": 1.0688334380602857, "learning_rate": 0.0004900334554680364, "loss": 4.8187, "step": 1327 }, { "epoch": 1.52, "grad_norm": 0.6038245273867472, "learning_rate": 0.0004898615255108834, "loss": 4.6202, "step": 1328 }, 
{ "epoch": 1.52, "grad_norm": 1.5453462263854487, "learning_rate": 0.0004896894914681356, "loss": 4.6939, "step": 1329 }, { "epoch": 1.52, "grad_norm": 0.44444864107388815, "learning_rate": 0.0004895173534341053, "loss": 4.6116, "step": 1330 }, { "epoch": 1.52, "grad_norm": 2.036988184508873, "learning_rate": 0.0004893451115031618, "loss": 4.7477, "step": 1331 }, { "epoch": 1.52, "grad_norm": 1.0882518685638196, "learning_rate": 0.0004891727657697312, "loss": 4.6446, "step": 1332 }, { "epoch": 1.52, "grad_norm": 1.0628123204130204, "learning_rate": 0.0004890003163282968, "loss": 4.769, "step": 1333 }, { "epoch": 1.52, "grad_norm": 0.6698103215131185, "learning_rate": 0.0004888277632733983, "loss": 4.3971, "step": 1334 }, { "epoch": 1.53, "grad_norm": 2.6262767320960543, "learning_rate": 0.0004886551066996328, "loss": 4.6933, "step": 1335 }, { "epoch": 1.53, "grad_norm": 3.155644456002479, "learning_rate": 0.0004884823467016535, "loss": 5.0961, "step": 1336 }, { "epoch": 1.53, "grad_norm": 0.9896598010878288, "learning_rate": 0.0004883094833741708, "loss": 4.9325, "step": 1337 }, { "epoch": 1.53, "grad_norm": 1.7628983777222198, "learning_rate": 0.0004881365168119515, "loss": 4.8234, "step": 1338 }, { "epoch": 1.53, "grad_norm": 2.2339061368749555, "learning_rate": 0.00048796344710981917, "loss": 4.68, "step": 1339 }, { "epoch": 1.53, "grad_norm": 1.8439691112891878, "learning_rate": 0.0004877902743626537, "loss": 4.9194, "step": 1340 }, { "epoch": 1.53, "grad_norm": 2.2315756389591717, "learning_rate": 0.0004876169986653915, "loss": 5.1379, "step": 1341 }, { "epoch": 1.53, "grad_norm": 0.7000102601826791, "learning_rate": 0.00048744362011302587, "loss": 4.8732, "step": 1342 }, { "epoch": 1.53, "grad_norm": 3.2558031191071217, "learning_rate": 0.0004872701388006057, "loss": 4.7404, "step": 1343 }, { "epoch": 1.54, "grad_norm": 0.9742743382110485, "learning_rate": 0.00048709655482323695, "loss": 4.7395, "step": 1344 }, { "epoch": 1.54, "grad_norm": 2.555355960934867, "learning_rate": 0.00048692286827608144, "loss": 4.8071, "step": 1345 }, { "epoch": 1.54, "grad_norm": 0.876899528784007, "learning_rate": 0.00048674907925435744, "loss": 4.5844, "step": 1346 }, { "epoch": 1.54, "grad_norm": 6.88872568308708, "learning_rate": 0.0004865751878533391, "loss": 5.1362, "step": 1347 }, { "epoch": 1.54, "grad_norm": 2.356039397813764, "learning_rate": 0.0004864011941683571, "loss": 4.9453, "step": 1348 }, { "epoch": 1.54, "grad_norm": 2.4333170809079694, "learning_rate": 0.0004862270982947979, "loss": 5.1346, "step": 1349 }, { "epoch": 1.54, "grad_norm": 1.0516138116267888, "learning_rate": 0.0004860529003281041, "loss": 5.0118, "step": 1350 }, { "epoch": 1.54, "grad_norm": 1.4618361571732472, "learning_rate": 0.00048587860036377424, "loss": 5.2146, "step": 1351 }, { "epoch": 1.55, "grad_norm": 1.356003512472729, "learning_rate": 0.00048570419849736275, "loss": 5.0642, "step": 1352 }, { "epoch": 1.55, "grad_norm": 1.0395091003090329, "learning_rate": 0.00048552969482447997, "loss": 5.0751, "step": 1353 }, { "epoch": 1.55, "grad_norm": 1.767687223085626, "learning_rate": 0.00048535508944079204, "loss": 5.2233, "step": 1354 }, { "epoch": 1.55, "grad_norm": 1.6389836781127758, "learning_rate": 0.0004851803824420209, "loss": 4.976, "step": 1355 }, { "epoch": 1.55, "grad_norm": 0.6035947210613768, "learning_rate": 0.0004850055739239441, "loss": 4.8781, "step": 1356 }, { "epoch": 1.55, "grad_norm": 1.2327314573399948, "learning_rate": 0.00048483066398239484, "loss": 5.0509, "step": 1357 }, { "epoch": 1.55, 
"grad_norm": 1.0548462389875854, "learning_rate": 0.0004846556527132621, "loss": 4.9994, "step": 1358 }, { "epoch": 1.55, "grad_norm": 0.7759647235778956, "learning_rate": 0.0004844805402124902, "loss": 4.8225, "step": 1359 }, { "epoch": 1.55, "grad_norm": 0.6492476032985707, "learning_rate": 0.0004843053265760792, "loss": 4.9365, "step": 1360 }, { "epoch": 1.56, "grad_norm": 1.1652259501079139, "learning_rate": 0.00048413001190008426, "loss": 4.9432, "step": 1361 }, { "epoch": 1.56, "grad_norm": 0.8770466793294488, "learning_rate": 0.0004839545962806161, "loss": 4.9574, "step": 1362 }, { "epoch": 1.56, "grad_norm": 0.742162330247459, "learning_rate": 0.0004837790798138411, "loss": 4.8017, "step": 1363 }, { "epoch": 1.56, "grad_norm": 0.9097192443886456, "learning_rate": 0.00048360346259598033, "loss": 5.009, "step": 1364 }, { "epoch": 1.56, "grad_norm": 0.6557750179809073, "learning_rate": 0.00048342774472331056, "loss": 4.7991, "step": 1365 }, { "epoch": 1.56, "grad_norm": 0.9866823245433447, "learning_rate": 0.00048325192629216333, "loss": 5.0082, "step": 1366 }, { "epoch": 1.56, "grad_norm": 0.89149319123167, "learning_rate": 0.0004830760073989259, "loss": 4.8187, "step": 1367 }, { "epoch": 1.56, "grad_norm": 0.6895872759546015, "learning_rate": 0.00048289998814003997, "loss": 4.8978, "step": 1368 }, { "epoch": 1.56, "grad_norm": 0.7464817420797909, "learning_rate": 0.00048272386861200275, "loss": 4.9383, "step": 1369 }, { "epoch": 1.57, "grad_norm": 0.7182235230206421, "learning_rate": 0.0004825476489113661, "loss": 4.9526, "step": 1370 }, { "epoch": 1.57, "grad_norm": 0.464471966305937, "learning_rate": 0.000482371329134737, "loss": 4.8158, "step": 1371 }, { "epoch": 1.57, "grad_norm": 0.8729558374903325, "learning_rate": 0.00048219490937877706, "loss": 4.6788, "step": 1372 }, { "epoch": 1.57, "grad_norm": 0.5584509883951888, "learning_rate": 0.0004820183897402029, "loss": 4.7441, "step": 1373 }, { "epoch": 1.57, "grad_norm": 0.48636883545966814, "learning_rate": 0.00048184177031578604, "loss": 4.7471, "step": 1374 }, { "epoch": 1.57, "grad_norm": 0.6509337508930622, "learning_rate": 0.00048166505120235224, "loss": 4.8365, "step": 1375 }, { "epoch": 1.57, "grad_norm": 0.44559332057989465, "learning_rate": 0.0004814882324967824, "loss": 4.8401, "step": 1376 }, { "epoch": 1.57, "grad_norm": 0.5311355603205702, "learning_rate": 0.0004813113142960116, "loss": 4.7636, "step": 1377 }, { "epoch": 1.57, "grad_norm": 0.48411680752815783, "learning_rate": 0.0004811342966970298, "loss": 4.6858, "step": 1378 }, { "epoch": 1.58, "grad_norm": 0.3805785910338646, "learning_rate": 0.0004809571797968812, "loss": 4.6444, "step": 1379 }, { "epoch": 1.58, "grad_norm": 0.7448640827646127, "learning_rate": 0.0004807799636926648, "loss": 4.8185, "step": 1380 }, { "epoch": 1.58, "grad_norm": 0.5174703093227337, "learning_rate": 0.00048060264848153345, "loss": 4.6472, "step": 1381 }, { "epoch": 1.58, "grad_norm": 0.44953261244096343, "learning_rate": 0.00048042523426069486, "loss": 4.6392, "step": 1382 }, { "epoch": 1.58, "grad_norm": 0.49875075243037065, "learning_rate": 0.0004802477211274106, "loss": 4.6904, "step": 1383 }, { "epoch": 1.58, "grad_norm": 0.45890908478755144, "learning_rate": 0.0004800701091789968, "loss": 4.6999, "step": 1384 }, { "epoch": 1.58, "grad_norm": 0.4795545764168666, "learning_rate": 0.00047989239851282343, "loss": 4.7719, "step": 1385 }, { "epoch": 1.58, "grad_norm": 0.4164235961384302, "learning_rate": 0.00047971458922631494, "loss": 4.8652, "step": 1386 }, { "epoch": 1.59, 
"grad_norm": 0.5952719571367953, "learning_rate": 0.00047953668141694953, "loss": 4.6569, "step": 1387 }, { "epoch": 1.59, "grad_norm": 0.47208228718784045, "learning_rate": 0.0004793586751822596, "loss": 4.9877, "step": 1388 }, { "epoch": 1.59, "grad_norm": 0.4928132344293389, "learning_rate": 0.00047918057061983145, "loss": 4.7921, "step": 1389 }, { "epoch": 1.59, "grad_norm": 0.5765504772480727, "learning_rate": 0.00047900236782730536, "loss": 4.6866, "step": 1390 }, { "epoch": 1.59, "grad_norm": 0.3994470284200598, "learning_rate": 0.0004788240669023752, "loss": 4.7505, "step": 1391 }, { "epoch": 1.59, "grad_norm": 0.4111779963764068, "learning_rate": 0.00047864566794278917, "loss": 4.7897, "step": 1392 }, { "epoch": 1.59, "grad_norm": 0.880224665976422, "learning_rate": 0.0004784671710463486, "loss": 4.7381, "step": 1393 }, { "epoch": 1.59, "grad_norm": 0.6544814334197376, "learning_rate": 0.00047828857631090866, "loss": 4.8309, "step": 1394 }, { "epoch": 1.59, "grad_norm": 0.8779247759855048, "learning_rate": 0.00047810988383437873, "loss": 4.5432, "step": 1395 }, { "epoch": 1.6, "grad_norm": 0.5696889940774883, "learning_rate": 0.00047793109371472095, "loss": 4.8037, "step": 1396 }, { "epoch": 1.6, "grad_norm": 0.7494605259976475, "learning_rate": 0.0004777522060499515, "loss": 4.5989, "step": 1397 }, { "epoch": 1.6, "grad_norm": 0.405849873533486, "learning_rate": 0.00047757322093813987, "loss": 4.6336, "step": 1398 }, { "epoch": 1.6, "grad_norm": 0.8550914982094713, "learning_rate": 0.00047739413847740903, "loss": 4.6241, "step": 1399 }, { "epoch": 1.6, "grad_norm": 0.5558949521132047, "learning_rate": 0.0004772149587659353, "loss": 4.6385, "step": 1400 }, { "epoch": 1.6, "eval_blimp_filtered_avg": 0.5486567164179105, "eval_blimp_filtered_std": 0.005423902078597133, "step": 1400 }, { "epoch": 1.6, "eval_blimp_supplement_avg": 0.4978448275862069, "eval_blimp_supplement_std": 0.02262365955039507, "step": 1400 }, { "epoch": 1.6, "eval_vqa_filtered_avg": 0.12, "eval_vqa_filtered_std": 0.03265986323710906, "step": 1400 }, { "epoch": 1.6, "eval_winoground_filtered_avg": 0.55, "eval_winoground_filtered_std": 0.05, "step": 1400 }, { "epoch": 1.6, "grad_norm": 0.8336343032944394, "learning_rate": 0.00047703568190194827, "loss": 4.6168, "step": 1401 }, { "epoch": 1.6, "grad_norm": 0.3977989934017465, "learning_rate": 0.0004768563079837308, "loss": 4.6656, "step": 1402 }, { "epoch": 1.6, "grad_norm": 0.48833622544829103, "learning_rate": 0.00047667683710961913, "loss": 4.6639, "step": 1403 }, { "epoch": 1.6, "grad_norm": 0.6201376658756721, "learning_rate": 0.0004764972693780023, "loss": 4.6015, "step": 1404 }, { "epoch": 1.61, "grad_norm": 0.45060823558163343, "learning_rate": 0.0004763176048873229, "loss": 4.465, "step": 1405 }, { "epoch": 1.61, "grad_norm": 0.5513200057615996, "learning_rate": 0.0004761378437360761, "loss": 4.6264, "step": 1406 }, { "epoch": 1.61, "grad_norm": 0.5303636884422888, "learning_rate": 0.0004759579860228103, "loss": 4.7561, "step": 1407 }, { "epoch": 1.61, "grad_norm": 0.5828719715564574, "learning_rate": 0.00047577803184612697, "loss": 4.7605, "step": 1408 }, { "epoch": 1.61, "grad_norm": 0.491684759345481, "learning_rate": 0.0004755979813046803, "loss": 4.5292, "step": 1409 }, { "epoch": 1.61, "grad_norm": 0.5700256073475649, "learning_rate": 0.00047541783449717715, "loss": 4.5218, "step": 1410 }, { "epoch": 1.61, "grad_norm": 0.690719867302228, "learning_rate": 0.00047523759152237746, "loss": 4.7592, "step": 1411 }, { "epoch": 1.61, "grad_norm": 
0.4783347732188243, "learning_rate": 0.0004750572524790938, "loss": 4.5417, "step": 1412 }, { "epoch": 1.61, "grad_norm": 1.311904722278894, "learning_rate": 0.0004748768174661912, "loss": 4.7346, "step": 1413 }, { "epoch": 1.62, "grad_norm": 2.039716183102799, "learning_rate": 0.0004746962865825876, "loss": 4.5815, "step": 1414 }, { "epoch": 1.62, "grad_norm": 0.7763142112437567, "learning_rate": 0.0004745156599272533, "loss": 4.5282, "step": 1415 }, { "epoch": 1.62, "grad_norm": 1.2327429361222049, "learning_rate": 0.00047433493759921124, "loss": 4.522, "step": 1416 }, { "epoch": 1.62, "grad_norm": 1.022801721757132, "learning_rate": 0.00047415411969753673, "loss": 4.4127, "step": 1417 }, { "epoch": 1.62, "grad_norm": 1.5485097704381883, "learning_rate": 0.0004739732063213574, "loss": 4.5386, "step": 1418 }, { "epoch": 1.62, "grad_norm": 0.4633553754141256, "learning_rate": 0.0004737921975698533, "loss": 4.6599, "step": 1419 }, { "epoch": 1.62, "grad_norm": 1.0682156168501629, "learning_rate": 0.0004736110935422568, "loss": 4.6262, "step": 1420 }, { "epoch": 1.62, "grad_norm": 0.5875852940501651, "learning_rate": 0.0004734298943378526, "loss": 4.4744, "step": 1421 }, { "epoch": 1.63, "grad_norm": 0.7184393812421046, "learning_rate": 0.0004732486000559773, "loss": 4.6543, "step": 1422 }, { "epoch": 1.63, "grad_norm": 0.459657755517521, "learning_rate": 0.00047306721079601983, "loss": 4.5357, "step": 1423 }, { "epoch": 1.63, "grad_norm": 0.7069916733163126, "learning_rate": 0.0004728857266574211, "loss": 4.486, "step": 1424 }, { "epoch": 1.63, "grad_norm": 0.42824071906555994, "learning_rate": 0.0004727041477396741, "loss": 4.4179, "step": 1425 }, { "epoch": 1.63, "grad_norm": 0.6996728013593154, "learning_rate": 0.00047252247414232367, "loss": 4.6625, "step": 1426 }, { "epoch": 1.63, "grad_norm": 0.6613682009533245, "learning_rate": 0.0004723407059649668, "loss": 4.4985, "step": 1427 }, { "epoch": 1.63, "grad_norm": 0.5157407781149183, "learning_rate": 0.0004721588433072519, "loss": 4.5895, "step": 1428 }, { "epoch": 1.63, "grad_norm": 0.7048944820456728, "learning_rate": 0.0004719768862688798, "loss": 4.7225, "step": 1429 }, { "epoch": 1.63, "grad_norm": 0.6262316373051795, "learning_rate": 0.0004717948349496023, "loss": 4.5735, "step": 1430 }, { "epoch": 1.64, "grad_norm": 0.8522926897856793, "learning_rate": 0.0004716126894492236, "loss": 4.583, "step": 1431 }, { "epoch": 1.64, "grad_norm": 0.953880313702566, "learning_rate": 0.0004714304498675991, "loss": 4.7722, "step": 1432 }, { "epoch": 1.64, "grad_norm": 0.7744020269228191, "learning_rate": 0.000471248116304636, "loss": 4.6082, "step": 1433 }, { "epoch": 1.64, "grad_norm": 1.1055975363378021, "learning_rate": 0.00047106568886029276, "loss": 4.5719, "step": 1434 }, { "epoch": 1.64, "grad_norm": 0.7480413036131542, "learning_rate": 0.0004708831676345796, "loss": 4.6206, "step": 1435 }, { "epoch": 1.64, "grad_norm": 1.408364565913358, "learning_rate": 0.00047070055272755795, "loss": 4.4276, "step": 1436 }, { "epoch": 1.64, "grad_norm": 1.21661965567842, "learning_rate": 0.00047051784423934087, "loss": 4.6314, "step": 1437 }, { "epoch": 1.64, "grad_norm": 0.7983588404581601, "learning_rate": 0.00047033504227009216, "loss": 4.5995, "step": 1438 }, { "epoch": 1.64, "grad_norm": 1.1885033725376346, "learning_rate": 0.0004701521469200277, "loss": 4.6225, "step": 1439 }, { "epoch": 1.65, "grad_norm": 0.7010293270107962, "learning_rate": 0.0004699691582894137, "loss": 4.7033, "step": 1440 }, { "epoch": 1.65, "grad_norm": 0.8026210036924896, 
"learning_rate": 0.0004697860764785681, "loss": 4.6279, "step": 1441 }, { "epoch": 1.65, "grad_norm": 1.292836579976248, "learning_rate": 0.0004696029015878596, "loss": 4.625, "step": 1442 }, { "epoch": 1.65, "grad_norm": 1.0391875969159525, "learning_rate": 0.0004694196337177082, "loss": 4.4795, "step": 1443 }, { "epoch": 1.65, "grad_norm": 0.6960164897456129, "learning_rate": 0.0004692362729685847, "loss": 4.5121, "step": 1444 }, { "epoch": 1.65, "grad_norm": 1.271940700352897, "learning_rate": 0.00046905281944101084, "loss": 4.6049, "step": 1445 }, { "epoch": 1.65, "grad_norm": 0.7326496801015598, "learning_rate": 0.0004688692732355592, "loss": 4.6007, "step": 1446 }, { "epoch": 1.65, "grad_norm": 2.1452438217738163, "learning_rate": 0.00046868563445285315, "loss": 4.6451, "step": 1447 }, { "epoch": 1.65, "grad_norm": 1.2998642561112748, "learning_rate": 0.00046850190319356705, "loss": 4.5305, "step": 1448 }, { "epoch": 1.66, "grad_norm": 1.4626702907678346, "learning_rate": 0.00046831807955842556, "loss": 4.6695, "step": 1449 }, { "epoch": 1.66, "grad_norm": 1.6534669079528632, "learning_rate": 0.00046813416364820435, "loss": 4.6443, "step": 1450 }, { "epoch": 1.66, "grad_norm": 1.0688731454564804, "learning_rate": 0.0004679501555637294, "loss": 4.5762, "step": 1451 }, { "epoch": 1.66, "grad_norm": 2.09981490315287, "learning_rate": 0.0004677660554058774, "loss": 4.4893, "step": 1452 }, { "epoch": 1.66, "grad_norm": 0.5100514339572236, "learning_rate": 0.00046758186327557544, "loss": 4.6868, "step": 1453 }, { "epoch": 1.66, "grad_norm": 1.2492471971514083, "learning_rate": 0.00046739757927380106, "loss": 4.3353, "step": 1454 }, { "epoch": 1.66, "grad_norm": 0.5469293110300977, "learning_rate": 0.0004672132035015822, "loss": 4.5055, "step": 1455 }, { "epoch": 1.66, "grad_norm": 1.2579870590866056, "learning_rate": 0.00046702873605999694, "loss": 4.5368, "step": 1456 }, { "epoch": 1.67, "grad_norm": 0.5958553573610544, "learning_rate": 0.00046684417705017384, "loss": 4.4653, "step": 1457 }, { "epoch": 1.67, "grad_norm": 1.2281957190731483, "learning_rate": 0.0004666595265732916, "loss": 4.7814, "step": 1458 }, { "epoch": 1.67, "grad_norm": 0.4900740490343557, "learning_rate": 0.0004664747847305789, "loss": 4.4939, "step": 1459 }, { "epoch": 1.67, "grad_norm": 1.0841072960419629, "learning_rate": 0.00046628995162331465, "loss": 4.499, "step": 1460 }, { "epoch": 1.67, "grad_norm": 0.5404246112370104, "learning_rate": 0.0004661050273528278, "loss": 4.4651, "step": 1461 }, { "epoch": 1.67, "grad_norm": 0.5472302297342864, "learning_rate": 0.00046592001202049727, "loss": 4.551, "step": 1462 }, { "epoch": 1.67, "grad_norm": 0.5579220926830852, "learning_rate": 0.0004657349057277518, "loss": 4.5228, "step": 1463 }, { "epoch": 1.67, "grad_norm": 0.8373877029205464, "learning_rate": 0.00046554970857607027, "loss": 4.4877, "step": 1464 }, { "epoch": 1.67, "grad_norm": 0.6552871235740625, "learning_rate": 0.000465364420666981, "loss": 4.385, "step": 1465 }, { "epoch": 1.68, "grad_norm": 0.5533089422614667, "learning_rate": 0.00046517904210206226, "loss": 4.4582, "step": 1466 }, { "epoch": 1.68, "grad_norm": 0.5272674926590186, "learning_rate": 0.00046499357298294204, "loss": 4.528, "step": 1467 }, { "epoch": 1.68, "grad_norm": 0.9381489017405934, "learning_rate": 0.00046480801341129794, "loss": 4.448, "step": 1468 }, { "epoch": 1.68, "grad_norm": 1.0140125859524247, "learning_rate": 0.00046462236348885716, "loss": 4.4023, "step": 1469 }, { "epoch": 1.68, "grad_norm": 0.7254247415004822, 
"learning_rate": 0.00046443662331739643, "loss": 4.546, "step": 1470 }, { "epoch": 1.68, "grad_norm": 0.5818023262302461, "learning_rate": 0.00046425079299874185, "loss": 4.5079, "step": 1471 }, { "epoch": 1.68, "grad_norm": 0.8144083944954458, "learning_rate": 0.00046406487263476917, "loss": 4.4881, "step": 1472 }, { "epoch": 1.68, "grad_norm": 0.6603133548722864, "learning_rate": 0.00046387886232740326, "loss": 4.4152, "step": 1473 }, { "epoch": 1.68, "grad_norm": 1.1950135868753649, "learning_rate": 0.0004636927621786184, "loss": 4.4233, "step": 1474 }, { "epoch": 1.69, "grad_norm": 0.426835645225259, "learning_rate": 0.0004635065722904381, "loss": 4.388, "step": 1475 }, { "epoch": 1.69, "grad_norm": 1.098018527034672, "learning_rate": 0.0004633202927649352, "loss": 4.5237, "step": 1476 }, { "epoch": 1.69, "grad_norm": 1.0111653860246692, "learning_rate": 0.00046313392370423155, "loss": 4.3329, "step": 1477 }, { "epoch": 1.69, "grad_norm": 0.5425928466560297, "learning_rate": 0.0004629474652104981, "loss": 4.3738, "step": 1478 }, { "epoch": 1.69, "grad_norm": 0.44346157691869187, "learning_rate": 0.00046276091738595485, "loss": 4.5344, "step": 1479 }, { "epoch": 1.69, "grad_norm": 0.6908299364714499, "learning_rate": 0.0004625742803328708, "loss": 4.2441, "step": 1480 }, { "epoch": 1.69, "grad_norm": 0.8386324132653891, "learning_rate": 0.00046238755415356367, "loss": 4.5414, "step": 1481 }, { "epoch": 1.69, "grad_norm": 0.7794293168957607, "learning_rate": 0.0004622007389504003, "loss": 4.4319, "step": 1482 }, { "epoch": 1.69, "grad_norm": 0.47208493229831217, "learning_rate": 0.00046201383482579627, "loss": 4.3757, "step": 1483 }, { "epoch": 1.7, "grad_norm": 1.0444789947063238, "learning_rate": 0.00046182684188221583, "loss": 4.5969, "step": 1484 }, { "epoch": 1.7, "grad_norm": 1.333137218371951, "learning_rate": 0.000461639760222172, "loss": 4.5076, "step": 1485 }, { "epoch": 1.7, "grad_norm": 0.5584816850865542, "learning_rate": 0.0004614525899482265, "loss": 4.4563, "step": 1486 }, { "epoch": 1.7, "grad_norm": 1.976026380170844, "learning_rate": 0.00046126533116298927, "loss": 4.6841, "step": 1487 }, { "epoch": 1.7, "grad_norm": 1.9504723397362387, "learning_rate": 0.0004610779839691192, "loss": 4.5912, "step": 1488 }, { "epoch": 1.7, "grad_norm": 1.1938994740564937, "learning_rate": 0.00046089054846932344, "loss": 4.4707, "step": 1489 }, { "epoch": 1.7, "grad_norm": 0.6005844219105226, "learning_rate": 0.0004607030247663577, "loss": 4.4557, "step": 1490 }, { "epoch": 1.7, "grad_norm": 0.9278027705042341, "learning_rate": 0.00046051541296302575, "loss": 4.5477, "step": 1491 }, { "epoch": 1.71, "grad_norm": 0.5144290433547679, "learning_rate": 0.00046032771316218, "loss": 4.5463, "step": 1492 }, { "epoch": 1.71, "grad_norm": 0.8503685862660372, "learning_rate": 0.0004601399254667208, "loss": 4.5529, "step": 1493 }, { "epoch": 1.71, "grad_norm": 0.8315915348155466, "learning_rate": 0.0004599520499795971, "loss": 4.3742, "step": 1494 }, { "epoch": 1.71, "grad_norm": 0.6319480784344171, "learning_rate": 0.0004597640868038054, "loss": 4.4062, "step": 1495 }, { "epoch": 1.71, "grad_norm": 0.7561568061366876, "learning_rate": 0.00045957603604239076, "loss": 4.6252, "step": 1496 }, { "epoch": 1.71, "grad_norm": 0.5497514378646382, "learning_rate": 0.00045938789779844595, "loss": 4.324, "step": 1497 }, { "epoch": 1.71, "grad_norm": 0.6226330913796483, "learning_rate": 0.00045919967217511204, "loss": 4.4532, "step": 1498 }, { "epoch": 1.71, "grad_norm": 0.48237361176527244, "learning_rate": 
0.00045901135927557755, "loss": 4.467, "step": 1499 }, { "epoch": 1.71, "grad_norm": 0.9372067902547772, "learning_rate": 0.0004588229592030793, "loss": 4.4392, "step": 1500 }, { "epoch": 1.71, "eval_blimp_filtered_avg": 0.5313432835820896, "eval_blimp_filtered_std": 0.005503755937770046, "step": 1500 }, { "epoch": 1.71, "eval_blimp_supplement_avg": 0.5280172413793104, "eval_blimp_supplement_std": 0.02282582338755408, "step": 1500 }, { "epoch": 1.71, "eval_vqa_filtered_avg": 0.32, "eval_vqa_filtered_std": 0.04688261722621503, "step": 1500 }, { "epoch": 1.71, "eval_winoground_filtered_avg": 0.49, "eval_winoground_filtered_std": 0.05024183937956912, "step": 1500 }, { "epoch": 1.72, "grad_norm": 0.654145061622795, "learning_rate": 0.0004586344720609016, "loss": 4.6077, "step": 1501 }, { "epoch": 1.72, "grad_norm": 0.526227217844653, "learning_rate": 0.0004584458979523766, "loss": 4.5942, "step": 1502 }, { "epoch": 1.72, "grad_norm": 0.5786092814506665, "learning_rate": 0.0004582572369808841, "loss": 4.5841, "step": 1503 }, { "epoch": 1.72, "grad_norm": 0.4992249834821379, "learning_rate": 0.0004580684892498514, "loss": 4.689, "step": 1504 }, { "epoch": 1.72, "grad_norm": 0.7093856690445237, "learning_rate": 0.00045787965486275377, "loss": 4.5217, "step": 1505 }, { "epoch": 1.72, "grad_norm": 0.949826681657814, "learning_rate": 0.00045769073392311344, "loss": 4.3849, "step": 1506 }, { "epoch": 1.72, "grad_norm": 0.5645535110299973, "learning_rate": 0.0004575017265345005, "loss": 4.6325, "step": 1507 }, { "epoch": 1.72, "grad_norm": 1.1033980958579388, "learning_rate": 0.00045731263280053227, "loss": 4.5902, "step": 1508 }, { "epoch": 1.72, "grad_norm": 0.9327253236530959, "learning_rate": 0.00045712345282487344, "loss": 4.5289, "step": 1509 }, { "epoch": 1.73, "grad_norm": 2.1436300111871147, "learning_rate": 0.000456934186711236, "loss": 4.4449, "step": 1510 }, { "epoch": 1.73, "grad_norm": 0.5817931763904416, "learning_rate": 0.000456744834563379, "loss": 4.5626, "step": 1511 }, { "epoch": 1.73, "grad_norm": 2.1372299532619667, "learning_rate": 0.0004565553964851087, "loss": 4.6252, "step": 1512 }, { "epoch": 1.73, "grad_norm": 1.3881038760533755, "learning_rate": 0.000456365872580279, "loss": 4.499, "step": 1513 }, { "epoch": 1.73, "grad_norm": 1.0589549281892254, "learning_rate": 0.00045617626295279, "loss": 4.3902, "step": 1514 }, { "epoch": 1.73, "grad_norm": 0.7382290860503711, "learning_rate": 0.0004559865677065892, "loss": 4.6045, "step": 1515 }, { "epoch": 1.73, "grad_norm": 1.423513580311941, "learning_rate": 0.00045579678694567124, "loss": 4.5644, "step": 1516 }, { "epoch": 1.73, "grad_norm": 0.8946698768063079, "learning_rate": 0.00045560692077407743, "loss": 4.5081, "step": 1517 }, { "epoch": 1.73, "grad_norm": 1.4862550637998149, "learning_rate": 0.0004554169692958958, "loss": 4.5511, "step": 1518 }, { "epoch": 1.74, "grad_norm": 1.0756857163605837, "learning_rate": 0.00045522693261526115, "loss": 4.4308, "step": 1519 }, { "epoch": 1.74, "grad_norm": 1.5185065081935016, "learning_rate": 0.00045503681083635544, "loss": 4.3477, "step": 1520 }, { "epoch": 1.74, "grad_norm": 0.9903672820407596, "learning_rate": 0.0004548466040634066, "loss": 4.5014, "step": 1521 }, { "epoch": 1.74, "grad_norm": 1.0489641368864815, "learning_rate": 0.0004546563124006898, "loss": 4.4527, "step": 1522 }, { "epoch": 1.74, "grad_norm": 0.533081796064669, "learning_rate": 0.0004544659359525261, "loss": 4.6019, "step": 1523 }, { "epoch": 1.74, "grad_norm": 2.1273933593012666, "learning_rate": 
0.00045427547482328363, "loss": 4.655, "step": 1524 }, { "epoch": 1.74, "grad_norm": 0.8852063385845723, "learning_rate": 0.0004540849291173766, "loss": 4.3868, "step": 1525 }, { "epoch": 1.74, "grad_norm": 2.109922658905615, "learning_rate": 0.00045389429893926574, "loss": 4.2372, "step": 1526 }, { "epoch": 1.75, "grad_norm": 0.9360131719277137, "learning_rate": 0.000453703584393458, "loss": 4.5194, "step": 1527 }, { "epoch": 1.75, "grad_norm": 2.764261241191548, "learning_rate": 0.0004535127855845067, "loss": 4.4637, "step": 1528 }, { "epoch": 1.75, "grad_norm": 2.0557846456704674, "learning_rate": 0.00045332190261701115, "loss": 4.6643, "step": 1529 }, { "epoch": 1.75, "grad_norm": 1.5941633237900894, "learning_rate": 0.000453130935595617, "loss": 4.6425, "step": 1530 }, { "epoch": 1.75, "grad_norm": 1.846811508636747, "learning_rate": 0.0004529398846250158, "loss": 4.4481, "step": 1531 }, { "epoch": 1.75, "grad_norm": 1.452209922559483, "learning_rate": 0.00045274874980994536, "loss": 4.5801, "step": 1532 }, { "epoch": 1.75, "grad_norm": 1.8002040359388272, "learning_rate": 0.0004525575312551892, "loss": 4.5893, "step": 1533 }, { "epoch": 1.75, "grad_norm": 1.5718974262300127, "learning_rate": 0.0004523662290655769, "loss": 4.6487, "step": 1534 }, { "epoch": 1.75, "grad_norm": 1.7654485645598723, "learning_rate": 0.0004521748433459839, "loss": 4.6022, "step": 1535 }, { "epoch": 1.76, "grad_norm": 1.0322585322051914, "learning_rate": 0.00045198337420133126, "loss": 4.5261, "step": 1536 }, { "epoch": 1.76, "grad_norm": 1.3814834071902213, "learning_rate": 0.00045179182173658606, "loss": 4.425, "step": 1537 }, { "epoch": 1.76, "grad_norm": 0.689913139266974, "learning_rate": 0.00045160018605676084, "loss": 4.3825, "step": 1538 }, { "epoch": 1.76, "grad_norm": 0.6607967865290028, "learning_rate": 0.0004514084672669137, "loss": 4.6327, "step": 1539 }, { "epoch": 1.76, "grad_norm": 0.6807913903453778, "learning_rate": 0.0004512166654721486, "loss": 4.489, "step": 1540 }, { "epoch": 1.76, "grad_norm": 0.6542090792430012, "learning_rate": 0.00045102478077761477, "loss": 4.4205, "step": 1541 }, { "epoch": 1.76, "grad_norm": 1.824480276696534, "learning_rate": 0.0004508328132885069, "loss": 4.5661, "step": 1542 }, { "epoch": 1.76, "grad_norm": 1.4592661596487153, "learning_rate": 0.0004506407631100651, "loss": 4.4651, "step": 1543 }, { "epoch": 1.76, "grad_norm": 0.7493008324865154, "learning_rate": 0.0004504486303475749, "loss": 4.8073, "step": 1544 }, { "epoch": 1.77, "grad_norm": 0.5991501039749012, "learning_rate": 0.00045025641510636704, "loss": 4.4707, "step": 1545 }, { "epoch": 1.77, "grad_norm": 1.1285217323236216, "learning_rate": 0.00045006411749181724, "loss": 4.6226, "step": 1546 }, { "epoch": 1.77, "grad_norm": 0.46591175013868713, "learning_rate": 0.000449871737609347, "loss": 4.5421, "step": 1547 }, { "epoch": 1.77, "grad_norm": 0.9424459584122187, "learning_rate": 0.0004496792755644221, "loss": 4.3357, "step": 1548 }, { "epoch": 1.77, "grad_norm": 0.666702380037904, "learning_rate": 0.0004494867314625542, "loss": 4.6317, "step": 1549 }, { "epoch": 1.77, "grad_norm": 0.5083531514679692, "learning_rate": 0.00044929410540929917, "loss": 4.535, "step": 1550 }, { "epoch": 1.77, "grad_norm": 0.4824459398322389, "learning_rate": 0.00044910139751025836, "loss": 4.6558, "step": 1551 }, { "epoch": 1.77, "grad_norm": 0.5733738462978704, "learning_rate": 0.00044890860787107783, "loss": 4.5481, "step": 1552 }, { "epoch": 1.77, "grad_norm": 0.5570605605961345, "learning_rate": 
0.0004487157365974482, "loss": 4.6373, "step": 1553 }, { "epoch": 1.78, "grad_norm": 0.6037547014433808, "learning_rate": 0.00044852278379510534, "loss": 4.4533, "step": 1554 }, { "epoch": 1.78, "grad_norm": 0.5466371607699255, "learning_rate": 0.0004483297495698294, "loss": 4.2645, "step": 1555 }, { "epoch": 1.78, "grad_norm": 0.6836285309049864, "learning_rate": 0.00044813663402744537, "loss": 4.3832, "step": 1556 }, { "epoch": 1.78, "grad_norm": 0.7440476140350695, "learning_rate": 0.00044794343727382264, "loss": 4.5324, "step": 1557 }, { "epoch": 1.78, "grad_norm": 0.5972126201520713, "learning_rate": 0.0004477501594148753, "loss": 4.383, "step": 1558 }, { "epoch": 1.78, "grad_norm": 0.5664749444139975, "learning_rate": 0.0004475568005565619, "loss": 4.2492, "step": 1559 }, { "epoch": 1.78, "grad_norm": 0.4720967638929448, "learning_rate": 0.0004473633608048853, "loss": 4.4472, "step": 1560 }, { "epoch": 1.78, "grad_norm": 0.45218504991240904, "learning_rate": 0.00044716984026589275, "loss": 4.3195, "step": 1561 }, { "epoch": 1.79, "grad_norm": 0.5311894508382072, "learning_rate": 0.00044697623904567564, "loss": 4.4659, "step": 1562 }, { "epoch": 1.79, "grad_norm": 0.7004978387249767, "learning_rate": 0.00044678255725037, "loss": 4.4841, "step": 1563 }, { "epoch": 1.79, "grad_norm": 0.44525585533389095, "learning_rate": 0.00044658879498615556, "loss": 4.3947, "step": 1564 }, { "epoch": 1.79, "grad_norm": 0.8513477205579004, "learning_rate": 0.0004463949523592564, "loss": 4.427, "step": 1565 }, { "epoch": 1.79, "grad_norm": 0.6941682015123531, "learning_rate": 0.0004462010294759408, "loss": 4.5005, "step": 1566 }, { "epoch": 1.79, "grad_norm": 0.7912810140554617, "learning_rate": 0.00044600702644252065, "loss": 4.5903, "step": 1567 }, { "epoch": 1.79, "grad_norm": 0.7956141515129387, "learning_rate": 0.00044581294336535216, "loss": 4.3732, "step": 1568 }, { "epoch": 1.79, "grad_norm": 0.6347753028153407, "learning_rate": 0.00044561878035083506, "loss": 4.4276, "step": 1569 }, { "epoch": 1.79, "grad_norm": 0.8131680034655744, "learning_rate": 0.00044542453750541326, "loss": 4.4113, "step": 1570 }, { "epoch": 1.8, "grad_norm": 0.5531160249916168, "learning_rate": 0.00044523021493557424, "loss": 4.4114, "step": 1571 }, { "epoch": 1.8, "grad_norm": 0.6709155154628429, "learning_rate": 0.00044503581274784926, "loss": 4.6583, "step": 1572 }, { "epoch": 1.8, "grad_norm": 0.5006304854746432, "learning_rate": 0.00044484133104881306, "loss": 4.4336, "step": 1573 }, { "epoch": 1.8, "grad_norm": 0.553156417242004, "learning_rate": 0.00044464676994508424, "loss": 4.527, "step": 1574 }, { "epoch": 1.8, "grad_norm": 0.5218126318756796, "learning_rate": 0.0004444521295433247, "loss": 4.5587, "step": 1575 }, { "epoch": 1.8, "grad_norm": 0.6502480420225046, "learning_rate": 0.00044425740995023997, "loss": 4.6106, "step": 1576 }, { "epoch": 1.8, "grad_norm": 0.42275079682782113, "learning_rate": 0.0004440626112725789, "loss": 4.471, "step": 1577 }, { "epoch": 1.8, "grad_norm": 0.5336234341063297, "learning_rate": 0.00044386773361713365, "loss": 4.4296, "step": 1578 }, { "epoch": 1.8, "grad_norm": 0.8768322447889828, "learning_rate": 0.00044367277709073995, "loss": 4.4361, "step": 1579 }, { "epoch": 1.81, "grad_norm": 0.4953182098422311, "learning_rate": 0.0004434777418002763, "loss": 4.6317, "step": 1580 }, { "epoch": 1.81, "grad_norm": 0.6246272115043877, "learning_rate": 0.00044328262785266485, "loss": 4.4747, "step": 1581 }, { "epoch": 1.81, "grad_norm": 0.5755007673529107, "learning_rate": 
0.0004430874353548706, "loss": 4.4884, "step": 1582 }, { "epoch": 1.81, "grad_norm": 0.7282181717800094, "learning_rate": 0.0004428921644139017, "loss": 4.4085, "step": 1583 }, { "epoch": 1.81, "grad_norm": 0.40345437354128066, "learning_rate": 0.00044269681513680925, "loss": 4.5598, "step": 1584 }, { "epoch": 1.81, "grad_norm": 0.49080899691349517, "learning_rate": 0.00044250138763068735, "loss": 4.4885, "step": 1585 }, { "epoch": 1.81, "grad_norm": 0.5832158170699938, "learning_rate": 0.0004423058820026729, "loss": 4.4993, "step": 1586 }, { "epoch": 1.81, "grad_norm": 0.7176815465231735, "learning_rate": 0.0004421102983599459, "loss": 4.5301, "step": 1587 }, { "epoch": 1.81, "grad_norm": 0.44961417394655306, "learning_rate": 0.00044191463680972877, "loss": 4.5264, "step": 1588 }, { "epoch": 1.82, "grad_norm": 1.2205992151088976, "learning_rate": 0.0004417188974592867, "loss": 4.5153, "step": 1589 }, { "epoch": 1.82, "grad_norm": 1.2555779788918737, "learning_rate": 0.00044152308041592775, "loss": 4.3809, "step": 1590 }, { "epoch": 1.82, "grad_norm": 1.0206842307813042, "learning_rate": 0.0004413271857870024, "loss": 4.4599, "step": 1591 }, { "epoch": 1.82, "grad_norm": 1.2798291072883572, "learning_rate": 0.0004411312136799038, "loss": 4.6609, "step": 1592 }, { "epoch": 1.82, "grad_norm": 0.9014588228099107, "learning_rate": 0.00044093516420206725, "loss": 4.3682, "step": 1593 }, { "epoch": 1.82, "grad_norm": 0.6508098069080454, "learning_rate": 0.0004407390374609709, "loss": 4.4755, "step": 1594 }, { "epoch": 1.82, "grad_norm": 1.2627251952775134, "learning_rate": 0.000440542833564135, "loss": 4.2805, "step": 1595 }, { "epoch": 1.82, "grad_norm": 0.5785295793103927, "learning_rate": 0.0004403465526191221, "loss": 4.3623, "step": 1596 }, { "epoch": 1.83, "grad_norm": 2.4499139141361086, "learning_rate": 0.00044015019473353707, "loss": 4.4762, "step": 1597 }, { "epoch": 1.83, "grad_norm": 2.8318889060683925, "learning_rate": 0.000439953760015027, "loss": 4.3095, "step": 1598 }, { "epoch": 1.83, "grad_norm": 0.8192322592907868, "learning_rate": 0.0004397572485712809, "loss": 4.3611, "step": 1599 }, { "epoch": 1.83, "grad_norm": 2.340115734256017, "learning_rate": 0.00043956066051003, "loss": 4.4017, "step": 1600 }, { "epoch": 1.83, "eval_blimp_filtered_avg": 0.5214925373134328, "eval_blimp_filtered_std": 0.0055171088893015184, "step": 1600 }, { "epoch": 1.83, "eval_blimp_supplement_avg": 0.540948275862069, "eval_blimp_supplement_std": 0.022895976379159827, "step": 1600 }, { "epoch": 1.83, "eval_vqa_filtered_avg": 0.3, "eval_vqa_filtered_std": 0.046056618647183814, "step": 1600 }, { "epoch": 1.83, "eval_winoground_filtered_avg": 0.53, "eval_winoground_filtered_std": 0.0501613558046592, "step": 1600 }, { "epoch": 1.83, "grad_norm": 1.1062711573457815, "learning_rate": 0.0004393639959390475, "loss": 4.378, "step": 1601 }, { "epoch": 1.83, "grad_norm": 1.7572324970881712, "learning_rate": 0.00043916725496614874, "loss": 4.4632, "step": 1602 }, { "epoch": 1.83, "grad_norm": 1.1753154767198906, "learning_rate": 0.00043897043769919054, "loss": 4.4543, "step": 1603 }, { "epoch": 1.83, "grad_norm": 0.6986362591812874, "learning_rate": 0.00043877354424607176, "loss": 4.3068, "step": 1604 }, { "epoch": 1.83, "grad_norm": 0.6796840040972915, "learning_rate": 0.00043857657471473314, "loss": 4.6881, "step": 1605 }, { "epoch": 1.84, "grad_norm": 0.9760170048085693, "learning_rate": 0.00043837952921315694, "loss": 4.4336, "step": 1606 }, { "epoch": 1.84, "grad_norm": 0.788095595336318, "learning_rate": 
0.0004381824078493673, "loss": 4.5834, "step": 1607 }, { "epoch": 1.84, "grad_norm": 0.5240881010696307, "learning_rate": 0.0004379852107314295, "loss": 4.7793, "step": 1608 }, { "epoch": 1.84, "grad_norm": 0.6800203301251186, "learning_rate": 0.00043778793796745086, "loss": 4.441, "step": 1609 }, { "epoch": 1.84, "grad_norm": 0.8861323881164404, "learning_rate": 0.0004375905896655799, "loss": 4.5062, "step": 1610 }, { "epoch": 1.84, "grad_norm": 0.7560387351358162, "learning_rate": 0.00043739316593400667, "loss": 4.4551, "step": 1611 }, { "epoch": 1.84, "grad_norm": 0.5964493157160932, "learning_rate": 0.0004371956668809623, "loss": 4.5077, "step": 1612 }, { "epoch": 1.84, "grad_norm": 1.139274966933268, "learning_rate": 0.0004369980926147197, "loss": 4.5886, "step": 1613 }, { "epoch": 1.84, "grad_norm": 0.7724423684446049, "learning_rate": 0.00043680044324359243, "loss": 4.5777, "step": 1614 }, { "epoch": 1.85, "grad_norm": 1.8352952048050188, "learning_rate": 0.00043660271887593585, "loss": 4.4798, "step": 1615 }, { "epoch": 1.85, "grad_norm": 2.1520712907073194, "learning_rate": 0.0004364049196201458, "loss": 4.4537, "step": 1616 }, { "epoch": 1.85, "grad_norm": 0.7571214660564398, "learning_rate": 0.0004362070455846597, "loss": 4.3287, "step": 1617 }, { "epoch": 1.85, "grad_norm": 1.8137930679825731, "learning_rate": 0.00043600909687795564, "loss": 4.5292, "step": 1618 }, { "epoch": 1.85, "grad_norm": 1.3744957516032565, "learning_rate": 0.0004358110736085528, "loss": 4.4856, "step": 1619 }, { "epoch": 1.85, "grad_norm": 0.9668678109511869, "learning_rate": 0.00043561297588501124, "loss": 4.4445, "step": 1620 }, { "epoch": 1.85, "grad_norm": 0.6740960450757343, "learning_rate": 0.0004354148038159317, "loss": 4.3517, "step": 1621 }, { "epoch": 1.85, "grad_norm": 1.8289741952368377, "learning_rate": 0.0004352165575099558, "loss": 4.3395, "step": 1622 }, { "epoch": 1.85, "grad_norm": 1.3421842419217718, "learning_rate": 0.0004350182370757659, "loss": 4.4447, "step": 1623 }, { "epoch": 1.86, "grad_norm": 1.5527515320007677, "learning_rate": 0.00043481984262208465, "loss": 4.5282, "step": 1624 }, { "epoch": 1.86, "grad_norm": 1.8512202419733643, "learning_rate": 0.00043462137425767596, "loss": 4.2819, "step": 1625 }, { "epoch": 1.86, "grad_norm": 0.8913513992739794, "learning_rate": 0.00043442283209134364, "loss": 4.6412, "step": 1626 }, { "epoch": 1.86, "grad_norm": 0.5874133955296026, "learning_rate": 0.00043422421623193214, "loss": 4.5435, "step": 1627 }, { "epoch": 1.86, "grad_norm": 1.0951708805144085, "learning_rate": 0.0004340255267883264, "loss": 4.7743, "step": 1628 }, { "epoch": 1.86, "grad_norm": 0.8670436598001132, "learning_rate": 0.00043382676386945164, "loss": 4.3273, "step": 1629 }, { "epoch": 1.86, "grad_norm": 1.3655586090725098, "learning_rate": 0.00043362792758427335, "loss": 4.5056, "step": 1630 }, { "epoch": 1.86, "grad_norm": 0.6214217589923374, "learning_rate": 0.00043342901804179726, "loss": 4.4349, "step": 1631 }, { "epoch": 1.87, "grad_norm": 1.1083160195572712, "learning_rate": 0.00043323003535106924, "loss": 4.3371, "step": 1632 }, { "epoch": 1.87, "grad_norm": 1.0672355165574567, "learning_rate": 0.0004330309796211752, "loss": 4.2983, "step": 1633 }, { "epoch": 1.87, "grad_norm": 0.7224957724859657, "learning_rate": 0.0004328318509612414, "loss": 4.5616, "step": 1634 }, { "epoch": 1.87, "grad_norm": 1.0695952625647405, "learning_rate": 0.00043263264948043364, "loss": 4.4534, "step": 1635 }, { "epoch": 1.87, "grad_norm": 0.6704887843305182, "learning_rate": 
0.0004324333752879579, "loss": 4.5169, "step": 1636 }, { "epoch": 1.87, "grad_norm": 0.7873972733995961, "learning_rate": 0.00043223402849306005, "loss": 4.4162, "step": 1637 }, { "epoch": 1.87, "grad_norm": 0.4120683543190572, "learning_rate": 0.00043203460920502565, "loss": 4.5158, "step": 1638 }, { "epoch": 1.87, "grad_norm": 0.9772876591209761, "learning_rate": 0.0004318351175331799, "loss": 4.4812, "step": 1639 }, { "epoch": 1.87, "grad_norm": 0.6957044394239216, "learning_rate": 0.00043163555358688796, "loss": 4.3274, "step": 1640 }, { "epoch": 1.88, "grad_norm": 1.5408209088733036, "learning_rate": 0.00043143591747555444, "loss": 4.5473, "step": 1641 }, { "epoch": 1.88, "grad_norm": 0.8075507081405351, "learning_rate": 0.0004312362093086236, "loss": 4.2734, "step": 1642 }, { "epoch": 1.88, "grad_norm": 0.775619692545571, "learning_rate": 0.000431036429195579, "loss": 4.4186, "step": 1643 }, { "epoch": 1.88, "grad_norm": 0.5877722406983326, "learning_rate": 0.0004308365772459439, "loss": 4.6196, "step": 1644 }, { "epoch": 1.88, "grad_norm": 0.7497860339814169, "learning_rate": 0.0004306366535692809, "loss": 4.541, "step": 1645 }, { "epoch": 1.88, "grad_norm": 0.5783856027556545, "learning_rate": 0.00043043665827519165, "loss": 4.5663, "step": 1646 }, { "epoch": 1.88, "grad_norm": 0.6646240536949809, "learning_rate": 0.0004302365914733174, "loss": 4.499, "step": 1647 }, { "epoch": 1.88, "grad_norm": 1.1692365510955416, "learning_rate": 0.0004300364532733384, "loss": 4.5122, "step": 1648 }, { "epoch": 1.88, "grad_norm": 1.125916644396735, "learning_rate": 0.00042983624378497426, "loss": 4.2775, "step": 1649 }, { "epoch": 1.89, "grad_norm": 0.6241479816132806, "learning_rate": 0.00042963596311798325, "loss": 4.4907, "step": 1650 }, { "epoch": 1.89, "grad_norm": 1.2027929384749094, "learning_rate": 0.00042943561138216314, "loss": 4.4543, "step": 1651 }, { "epoch": 1.89, "grad_norm": 0.5950079789723692, "learning_rate": 0.00042923518868735045, "loss": 4.2052, "step": 1652 }, { "epoch": 1.89, "grad_norm": 1.4473557972125584, "learning_rate": 0.00042903469514342054, "loss": 4.512, "step": 1653 }, { "epoch": 1.89, "grad_norm": 0.7118452392677886, "learning_rate": 0.0004288341308602877, "loss": 4.4998, "step": 1654 }, { "epoch": 1.89, "grad_norm": 1.0007710009356714, "learning_rate": 0.0004286334959479048, "loss": 4.5244, "step": 1655 }, { "epoch": 1.89, "grad_norm": 1.0316587186171344, "learning_rate": 0.00042843279051626385, "loss": 4.3448, "step": 1656 }, { "epoch": 1.89, "grad_norm": 1.0275663885997315, "learning_rate": 0.0004282320146753953, "loss": 4.3363, "step": 1657 }, { "epoch": 1.89, "grad_norm": 1.6308786939732394, "learning_rate": 0.0004280311685353679, "loss": 4.3691, "step": 1658 }, { "epoch": 1.9, "grad_norm": 0.5516393458016108, "learning_rate": 0.00042783025220628946, "loss": 4.6687, "step": 1659 }, { "epoch": 1.9, "grad_norm": 1.519094490004298, "learning_rate": 0.0004276292657983059, "loss": 4.3569, "step": 1660 }, { "epoch": 1.9, "grad_norm": 0.7778047105828223, "learning_rate": 0.0004274282094216017, "loss": 4.4711, "step": 1661 }, { "epoch": 1.9, "grad_norm": 0.7222897603470187, "learning_rate": 0.00042722708318639974, "loss": 4.4056, "step": 1662 }, { "epoch": 1.9, "grad_norm": 0.6740884088950804, "learning_rate": 0.00042702588720296104, "loss": 4.4864, "step": 1663 }, { "epoch": 1.9, "grad_norm": 0.7510745543541218, "learning_rate": 0.0004268246215815851, "loss": 4.453, "step": 1664 }, { "epoch": 1.9, "grad_norm": 0.7319593364986465, "learning_rate": 
0.00042662328643260924, "loss": 4.4867, "step": 1665 }, { "epoch": 1.9, "grad_norm": 0.5005295564207787, "learning_rate": 0.00042642188186640934, "loss": 4.5488, "step": 1666 }, { "epoch": 1.91, "grad_norm": 0.6856624232920483, "learning_rate": 0.0004262204079933988, "loss": 4.4724, "step": 1667 }, { "epoch": 1.91, "grad_norm": 0.5112606223014141, "learning_rate": 0.00042601886492402965, "loss": 4.3625, "step": 1668 }, { "epoch": 1.91, "grad_norm": 0.5208898593574679, "learning_rate": 0.00042581725276879135, "loss": 4.3936, "step": 1669 }, { "epoch": 1.91, "grad_norm": 0.5433240621282674, "learning_rate": 0.00042561557163821144, "loss": 4.2148, "step": 1670 }, { "epoch": 1.91, "grad_norm": 1.8918250753718338, "learning_rate": 0.0004254138216428552, "loss": 4.2593, "step": 1671 }, { "epoch": 1.91, "grad_norm": 1.2435816516270333, "learning_rate": 0.0004252120028933258, "loss": 4.3805, "step": 1672 }, { "epoch": 1.91, "grad_norm": 1.0098729821994417, "learning_rate": 0.0004250101155002639, "loss": 4.4043, "step": 1673 }, { "epoch": 1.91, "grad_norm": 0.992161613470479, "learning_rate": 0.0004248081595743479, "loss": 4.5608, "step": 1674 }, { "epoch": 1.91, "grad_norm": 0.4171298117993861, "learning_rate": 0.0004246061352262938, "loss": 4.3392, "step": 1675 }, { "epoch": 1.92, "grad_norm": 0.9508658021202646, "learning_rate": 0.0004244040425668552, "loss": 4.4749, "step": 1676 }, { "epoch": 1.92, "grad_norm": 0.731792602020031, "learning_rate": 0.0004242018817068228, "loss": 4.3818, "step": 1677 }, { "epoch": 1.92, "grad_norm": 1.1319227514405912, "learning_rate": 0.0004239996527570251, "loss": 4.4845, "step": 1678 }, { "epoch": 1.92, "grad_norm": 0.9317245734123896, "learning_rate": 0.00042379735582832764, "loss": 4.429, "step": 1679 }, { "epoch": 1.92, "grad_norm": 0.6242143530598611, "learning_rate": 0.00042359499103163334, "loss": 4.4413, "step": 1680 }, { "epoch": 1.92, "grad_norm": 0.7172844527427172, "learning_rate": 0.00042339255847788246, "loss": 4.3425, "step": 1681 }, { "epoch": 1.92, "grad_norm": 0.9794042652786109, "learning_rate": 0.000423190058278052, "loss": 4.4374, "step": 1682 }, { "epoch": 1.92, "grad_norm": 1.108826880671933, "learning_rate": 0.0004229874905431566, "loss": 4.5155, "step": 1683 }, { "epoch": 1.92, "grad_norm": 0.5743233084639326, "learning_rate": 0.0004227848553842474, "loss": 4.3519, "step": 1684 }, { "epoch": 1.93, "grad_norm": 0.7692965815613436, "learning_rate": 0.00042258215291241294, "loss": 4.2709, "step": 1685 }, { "epoch": 1.93, "grad_norm": 1.5353250114366237, "learning_rate": 0.0004223793832387783, "loss": 4.399, "step": 1686 }, { "epoch": 1.93, "grad_norm": 1.0149592077859573, "learning_rate": 0.0004221765464745056, "loss": 4.5445, "step": 1687 }, { "epoch": 1.93, "grad_norm": 0.5389667455725036, "learning_rate": 0.00042197364273079374, "loss": 4.3855, "step": 1688 }, { "epoch": 1.93, "grad_norm": 0.7273276135371077, "learning_rate": 0.0004217706721188782, "loss": 4.3124, "step": 1689 }, { "epoch": 1.93, "grad_norm": 0.7948143531830061, "learning_rate": 0.00042156763475003137, "loss": 4.3479, "step": 1690 }, { "epoch": 1.93, "grad_norm": 0.8465187287578428, "learning_rate": 0.00042136453073556197, "loss": 4.3771, "step": 1691 }, { "epoch": 1.93, "grad_norm": 0.9693227388160204, "learning_rate": 0.0004211613601868154, "loss": 4.6063, "step": 1692 }, { "epoch": 1.93, "grad_norm": 0.788445395513107, "learning_rate": 0.0004209581232151735, "loss": 4.1839, "step": 1693 }, { "epoch": 1.94, "grad_norm": 0.7342286878837484, "learning_rate": 
0.00042075481993205445, "loss": 4.3877, "step": 1694 }, { "epoch": 1.94, "grad_norm": 1.6714318116683777, "learning_rate": 0.0004205514504489131, "loss": 4.4136, "step": 1695 }, { "epoch": 1.94, "grad_norm": 0.9178084493793184, "learning_rate": 0.00042034801487724025, "loss": 4.4047, "step": 1696 }, { "epoch": 1.94, "grad_norm": 1.2141523527405822, "learning_rate": 0.00042014451332856293, "loss": 4.3602, "step": 1697 }, { "epoch": 1.94, "grad_norm": 0.8311193323040716, "learning_rate": 0.00041994094591444453, "loss": 4.2386, "step": 1698 }, { "epoch": 1.94, "grad_norm": 0.9629229422004671, "learning_rate": 0.0004197373127464844, "loss": 4.4829, "step": 1699 }, { "epoch": 1.94, "grad_norm": 1.488589131529667, "learning_rate": 0.0004195336139363182, "loss": 4.5186, "step": 1700 }, { "epoch": 1.94, "eval_blimp_filtered_avg": 0.5177611940298508, "eval_blimp_filtered_std": 0.005552093716543691, "step": 1700 }, { "epoch": 1.94, "eval_blimp_supplement_avg": 0.5452586206896551, "eval_blimp_supplement_std": 0.02300868182607275, "step": 1700 }, { "epoch": 1.94, "eval_vqa_filtered_avg": 0.33, "eval_vqa_filtered_std": 0.04725815626252606, "step": 1700 }, { "epoch": 1.94, "eval_winoground_filtered_avg": 0.5, "eval_winoground_filtered_std": 0.050251890762960605, "step": 1700 }, { "epoch": 1.94, "grad_norm": 0.7300250838856243, "learning_rate": 0.00041932984959561713, "loss": 4.4523, "step": 1701 }, { "epoch": 1.95, "grad_norm": 1.618073334619606, "learning_rate": 0.00041912601983608863, "loss": 4.3033, "step": 1702 }, { "epoch": 1.95, "grad_norm": 1.4032577676168612, "learning_rate": 0.00041892212476947607, "loss": 4.4134, "step": 1703 }, { "epoch": 1.95, "grad_norm": 0.9148487490119513, "learning_rate": 0.00041871816450755845, "loss": 4.3107, "step": 1704 }, { "epoch": 1.95, "grad_norm": 1.0026347803407174, "learning_rate": 0.0004185141391621504, "loss": 4.273, "step": 1705 }, { "epoch": 1.95, "grad_norm": 1.1976773350505738, "learning_rate": 0.0004183100488451026, "loss": 4.5786, "step": 1706 }, { "epoch": 1.95, "grad_norm": 2.3566242182390615, "learning_rate": 0.00041810589366830087, "loss": 4.4315, "step": 1707 }, { "epoch": 1.95, "grad_norm": 0.9167315515058958, "learning_rate": 0.0004179016737436672, "loss": 4.3555, "step": 1708 }, { "epoch": 1.95, "grad_norm": 1.6255470256391695, "learning_rate": 0.0004176973891831583, "loss": 4.2553, "step": 1709 }, { "epoch": 1.95, "grad_norm": 1.3376835619454706, "learning_rate": 0.0004174930400987671, "loss": 4.3643, "step": 1710 }, { "epoch": 1.96, "grad_norm": 0.6461831683423362, "learning_rate": 0.0004172886266025213, "loss": 4.3821, "step": 1711 }, { "epoch": 1.96, "grad_norm": 1.488299611786309, "learning_rate": 0.00041708414880648435, "loss": 4.3476, "step": 1712 }, { "epoch": 1.96, "grad_norm": 0.6603762085036762, "learning_rate": 0.00041687960682275463, "loss": 4.2867, "step": 1713 }, { "epoch": 1.96, "grad_norm": 1.2553519366708934, "learning_rate": 0.0004166750007634658, "loss": 4.4367, "step": 1714 }, { "epoch": 1.96, "grad_norm": 0.7761759439308343, "learning_rate": 0.00041647033074078687, "loss": 4.4742, "step": 1715 }, { "epoch": 1.96, "grad_norm": 0.675467029815893, "learning_rate": 0.00041626559686692153, "loss": 4.2413, "step": 1716 }, { "epoch": 1.96, "grad_norm": 0.8284579231336491, "learning_rate": 0.00041606079925410886, "loss": 4.2623, "step": 1717 }, { "epoch": 1.96, "grad_norm": 0.7269783456016601, "learning_rate": 0.0004158559380146223, "loss": 4.4271, "step": 1718 }, { "epoch": 1.96, "grad_norm": 0.8173792912053426, "learning_rate": 
0.0004156510132607711, "loss": 4.3246, "step": 1719 }, { "epoch": 1.97, "grad_norm": 0.9097719074472587, "learning_rate": 0.0004154460251048985, "loss": 4.5922, "step": 1720 }, { "epoch": 1.97, "grad_norm": 1.529644444988264, "learning_rate": 0.0004152409736593827, "loss": 4.4221, "step": 1721 }, { "epoch": 1.97, "grad_norm": 0.7221716303188545, "learning_rate": 0.0004150358590366369, "loss": 4.2605, "step": 1722 }, { "epoch": 1.97, "grad_norm": 1.9174787508302422, "learning_rate": 0.0004148306813491087, "loss": 4.3734, "step": 1723 }, { "epoch": 1.97, "grad_norm": 0.6158856805895577, "learning_rate": 0.00041462544070928017, "loss": 4.3793, "step": 1724 }, { "epoch": 1.97, "grad_norm": 2.2094503159869308, "learning_rate": 0.0004144201372296681, "loss": 4.4607, "step": 1725 }, { "epoch": 1.97, "grad_norm": 2.024960978003341, "learning_rate": 0.0004142147710228236, "loss": 4.4571, "step": 1726 }, { "epoch": 1.97, "grad_norm": 1.1468928219261922, "learning_rate": 0.00041400934220133233, "loss": 4.443, "step": 1727 }, { "epoch": 1.97, "grad_norm": 2.014696256779845, "learning_rate": 0.00041380385087781403, "loss": 4.3176, "step": 1728 }, { "epoch": 1.98, "grad_norm": 1.2621236752226246, "learning_rate": 0.000413598297164923, "loss": 4.402, "step": 1729 }, { "epoch": 1.98, "grad_norm": 2.3179207177525445, "learning_rate": 0.00041339268117534736, "loss": 4.3529, "step": 1730 }, { "epoch": 1.98, "grad_norm": 1.3836473571134773, "learning_rate": 0.00041318700302180966, "loss": 4.4947, "step": 1731 }, { "epoch": 1.98, "grad_norm": 3.407274018527, "learning_rate": 0.0004129812628170667, "loss": 4.449, "step": 1732 }, { "epoch": 1.98, "grad_norm": 0.6947471370419148, "learning_rate": 0.0004127754606739088, "loss": 4.5125, "step": 1733 }, { "epoch": 1.98, "grad_norm": 1.5929383599010065, "learning_rate": 0.0004125695967051605, "loss": 4.3996, "step": 1734 }, { "epoch": 1.98, "grad_norm": 0.6965220077427591, "learning_rate": 0.00041236367102368037, "loss": 4.3799, "step": 1735 }, { "epoch": 1.98, "grad_norm": 1.7655269554544015, "learning_rate": 0.0004121576837423606, "loss": 4.4077, "step": 1736 }, { "epoch": 1.99, "grad_norm": 1.1175446043421966, "learning_rate": 0.00041195163497412723, "loss": 4.4055, "step": 1737 }, { "epoch": 1.99, "grad_norm": 1.4566760564025911, "learning_rate": 0.00041174552483193993, "loss": 4.4222, "step": 1738 }, { "epoch": 1.99, "grad_norm": 1.4581941908818814, "learning_rate": 0.00041153935342879214, "loss": 4.374, "step": 1739 }, { "epoch": 1.99, "grad_norm": 1.5184799360393917, "learning_rate": 0.000411333120877711, "loss": 4.402, "step": 1740 }, { "epoch": 1.99, "grad_norm": 0.5027711746244671, "learning_rate": 0.00041112682729175663, "loss": 4.1992, "step": 1741 }, { "epoch": 1.99, "grad_norm": 0.8530257331050496, "learning_rate": 0.0004109204727840234, "loss": 4.2583, "step": 1742 }, { "epoch": 1.99, "grad_norm": 0.5710299656445248, "learning_rate": 0.00041071405746763835, "loss": 4.5542, "step": 1743 }, { "epoch": 1.99, "grad_norm": 0.7936214798249622, "learning_rate": 0.0004105075814557623, "loss": 4.4091, "step": 1744 }, { "epoch": 1.99, "grad_norm": 0.7464092300216617, "learning_rate": 0.0004103010448615892, "loss": 4.2143, "step": 1745 }, { "epoch": 2.0, "grad_norm": 0.621981372302032, "learning_rate": 0.0004100944477983462, "loss": 4.3101, "step": 1746 }, { "epoch": 2.0, "grad_norm": 0.8856327944858391, "learning_rate": 0.00040988779037929364, "loss": 4.2575, "step": 1747 }, { "epoch": 2.0, "grad_norm": 0.5404639675732871, "learning_rate": 0.0004096810727177249, 
"loss": 4.4514, "step": 1748 }, { "epoch": 2.0, "grad_norm": 1.4846447606399151, "learning_rate": 0.0004094742949269666, "loss": 4.3881, "step": 1749 }, { "epoch": 2.0, "grad_norm": 1.1916002205291676, "learning_rate": 0.00040926745712037784, "loss": 4.5727, "step": 1750 }, { "epoch": 2.0, "grad_norm": 0.6793960794289209, "learning_rate": 0.00040906055941135116, "loss": 4.3721, "step": 1751 }, { "epoch": 2.0, "grad_norm": 0.872012429068228, "learning_rate": 0.00040885360191331166, "loss": 4.4496, "step": 1752 }, { "epoch": 2.0, "grad_norm": 1.0494357633900915, "learning_rate": 0.00040864658473971724, "loss": 4.3193, "step": 1753 }, { "epoch": 2.0, "grad_norm": 1.8626468099760267, "learning_rate": 0.00040843950800405844, "loss": 4.4595, "step": 1754 }, { "epoch": 2.01, "grad_norm": 0.5098361943562533, "learning_rate": 0.00040823237181985867, "loss": 4.4016, "step": 1755 }, { "epoch": 2.01, "grad_norm": 1.5154094787466759, "learning_rate": 0.00040802517630067383, "loss": 4.4414, "step": 1756 }, { "epoch": 2.01, "grad_norm": 0.4639249290006274, "learning_rate": 0.0004078179215600922, "loss": 4.4489, "step": 1757 }, { "epoch": 2.01, "grad_norm": 1.172193060316531, "learning_rate": 0.0004076106077117348, "loss": 4.4148, "step": 1758 }, { "epoch": 2.01, "grad_norm": 1.1144647373015413, "learning_rate": 0.00040740323486925493, "loss": 4.4443, "step": 1759 }, { "epoch": 2.01, "grad_norm": 1.5604760003574236, "learning_rate": 0.00040719580314633807, "loss": 4.498, "step": 1760 }, { "epoch": 2.01, "grad_norm": 0.9305536586387498, "learning_rate": 0.0004069883126567022, "loss": 4.2186, "step": 1761 }, { "epoch": 2.01, "grad_norm": 1.3316756240173309, "learning_rate": 0.00040678076351409736, "loss": 4.3583, "step": 1762 }, { "epoch": 2.01, "grad_norm": 0.5015316637154318, "learning_rate": 0.0004065731558323059, "loss": 4.3843, "step": 1763 }, { "epoch": 2.02, "grad_norm": 0.6919509052401066, "learning_rate": 0.00040636548972514217, "loss": 4.4375, "step": 1764 }, { "epoch": 2.02, "grad_norm": 0.5899623439957696, "learning_rate": 0.0004061577653064526, "loss": 4.3863, "step": 1765 }, { "epoch": 2.02, "grad_norm": 0.9256181552407693, "learning_rate": 0.0004059499826901155, "loss": 4.4612, "step": 1766 }, { "epoch": 2.02, "grad_norm": 0.7401326536428897, "learning_rate": 0.0004057421419900412, "loss": 4.3706, "step": 1767 }, { "epoch": 2.02, "grad_norm": 0.6909642052377967, "learning_rate": 0.0004055342433201717, "loss": 4.3979, "step": 1768 }, { "epoch": 2.02, "grad_norm": 0.6845071193232816, "learning_rate": 0.000405326286794481, "loss": 4.4335, "step": 1769 }, { "epoch": 2.02, "grad_norm": 0.5459638238364051, "learning_rate": 0.00040511827252697467, "loss": 4.4919, "step": 1770 }, { "epoch": 2.02, "grad_norm": 0.7967376620133855, "learning_rate": 0.00040491020063168994, "loss": 4.2999, "step": 1771 }, { "epoch": 2.03, "grad_norm": 0.8004048381600135, "learning_rate": 0.0004047020712226957, "loss": 4.4307, "step": 1772 }, { "epoch": 2.03, "grad_norm": 0.8287935906479291, "learning_rate": 0.0004044938844140924, "loss": 4.3612, "step": 1773 }, { "epoch": 2.03, "grad_norm": 0.9836174616291818, "learning_rate": 0.0004042856403200118, "loss": 4.5172, "step": 1774 }, { "epoch": 2.03, "grad_norm": 0.5205850853546091, "learning_rate": 0.00040407733905461715, "loss": 4.3342, "step": 1775 }, { "epoch": 2.03, "grad_norm": 0.650782700890393, "learning_rate": 0.00040386898073210305, "loss": 4.2359, "step": 1776 }, { "epoch": 2.03, "grad_norm": 0.565982894072005, "learning_rate": 0.0004036605654666954, "loss": 
4.3739, "step": 1777 }, { "epoch": 2.03, "grad_norm": 0.4863634811990835, "learning_rate": 0.0004034520933726514, "loss": 4.4819, "step": 1778 }, { "epoch": 2.03, "grad_norm": 0.9509882863250791, "learning_rate": 0.0004032435645642591, "loss": 4.4461, "step": 1779 }, { "epoch": 2.03, "grad_norm": 0.691904850770592, "learning_rate": 0.00040303497915583804, "loss": 4.3363, "step": 1780 }, { "epoch": 2.04, "grad_norm": 0.8285217994520352, "learning_rate": 0.00040282633726173856, "loss": 4.2729, "step": 1781 }, { "epoch": 2.04, "grad_norm": 1.0438603557221944, "learning_rate": 0.0004026176389963419, "loss": 4.3537, "step": 1782 }, { "epoch": 2.04, "grad_norm": 0.4947396635819412, "learning_rate": 0.00040240888447406033, "loss": 4.4694, "step": 1783 }, { "epoch": 2.04, "grad_norm": 0.6256779128679641, "learning_rate": 0.00040220007380933686, "loss": 4.4824, "step": 1784 }, { "epoch": 2.04, "grad_norm": 0.8283134875351507, "learning_rate": 0.00040199120711664565, "loss": 4.4153, "step": 1785 }, { "epoch": 2.04, "grad_norm": 0.7592506693934623, "learning_rate": 0.000401782284510491, "loss": 4.5871, "step": 1786 }, { "epoch": 2.04, "grad_norm": 0.4838933637138438, "learning_rate": 0.0004015733061054083, "loss": 4.5312, "step": 1787 }, { "epoch": 2.04, "grad_norm": 0.5259381115834612, "learning_rate": 0.0004013642720159631, "loss": 4.4037, "step": 1788 }, { "epoch": 2.04, "grad_norm": 0.43242940515886147, "learning_rate": 0.0004011551823567521, "loss": 4.303, "step": 1789 }, { "epoch": 2.05, "grad_norm": 0.9883664804878028, "learning_rate": 0.0004009460372424018, "loss": 4.4398, "step": 1790 }, { "epoch": 2.05, "grad_norm": 0.6022556328322494, "learning_rate": 0.00040073683678756974, "loss": 4.4385, "step": 1791 }, { "epoch": 2.05, "grad_norm": 0.553223105640268, "learning_rate": 0.0004005275811069432, "loss": 4.3427, "step": 1792 }, { "epoch": 2.05, "grad_norm": 0.6100627113656589, "learning_rate": 0.0004003182703152402, "loss": 4.436, "step": 1793 }, { "epoch": 2.05, "grad_norm": 0.7707356538582438, "learning_rate": 0.00040010890452720854, "loss": 4.4925, "step": 1794 }, { "epoch": 2.05, "grad_norm": 0.44880951159965, "learning_rate": 0.00039989948385762666, "loss": 4.5008, "step": 1795 }, { "epoch": 2.05, "grad_norm": 0.48781824236574567, "learning_rate": 0.0003996900084213027, "loss": 4.3456, "step": 1796 }, { "epoch": 2.05, "grad_norm": 0.5701972776370563, "learning_rate": 0.0003994804783330751, "loss": 4.2546, "step": 1797 }, { "epoch": 2.05, "grad_norm": 0.9212027993139891, "learning_rate": 0.00039927089370781193, "loss": 4.4039, "step": 1798 }, { "epoch": 2.06, "grad_norm": 0.6057753769984237, "learning_rate": 0.0003990612546604114, "loss": 4.3214, "step": 1799 }, { "epoch": 2.06, "grad_norm": 0.9917820280240645, "learning_rate": 0.0003988515613058016, "loss": 4.417, "step": 1800 }, { "epoch": 2.06, "eval_blimp_filtered_avg": 0.5180597014925373, "eval_blimp_filtered_std": 0.0055886917359090175, "step": 1800 }, { "epoch": 2.06, "eval_blimp_supplement_avg": 0.5517241379310345, "eval_blimp_supplement_std": 0.02311303070191751, "step": 1800 }, { "epoch": 2.06, "eval_vqa_filtered_avg": 0.34, "eval_vqa_filtered_std": 0.047609522856952365, "step": 1800 }, { "epoch": 2.06, "eval_winoground_filtered_avg": 0.48, "eval_winoground_filtered_std": 0.05021167315686779, "step": 1800 }, { "epoch": 2.06, "grad_norm": 1.0449728097343656, "learning_rate": 0.0003986418137589403, "loss": 4.3009, "step": 1801 }, { "epoch": 2.06, "grad_norm": 0.5545312366554692, "learning_rate": 0.000398432012134815, "loss": 
4.2492, "step": 1802 }, { "epoch": 2.06, "grad_norm": 1.3840083468460764, "learning_rate": 0.0003982221565484427, "loss": 4.3108, "step": 1803 }, { "epoch": 2.06, "grad_norm": 1.339426742976476, "learning_rate": 0.0003980122471148701, "loss": 4.3457, "step": 1804 }, { "epoch": 2.06, "grad_norm": 0.6107240803312746, "learning_rate": 0.0003978022839491736, "loss": 4.3249, "step": 1805 }, { "epoch": 2.06, "grad_norm": 0.6606070708023982, "learning_rate": 0.00039759226716645877, "loss": 4.3759, "step": 1806 }, { "epoch": 2.07, "grad_norm": 0.8520228054668548, "learning_rate": 0.00039738219688186064, "loss": 4.3245, "step": 1807 }, { "epoch": 2.07, "grad_norm": 1.2459207869462223, "learning_rate": 0.0003971720732105437, "loss": 4.4886, "step": 1808 }, { "epoch": 2.07, "grad_norm": 0.6505091954033541, "learning_rate": 0.0003969618962677016, "loss": 4.3171, "step": 1809 }, { "epoch": 2.07, "grad_norm": 1.4576826547423836, "learning_rate": 0.0003967516661685573, "loss": 4.3231, "step": 1810 }, { "epoch": 2.07, "grad_norm": 0.939585752353907, "learning_rate": 0.0003965413830283627, "loss": 4.391, "step": 1811 }, { "epoch": 2.07, "grad_norm": 2.3260532380080146, "learning_rate": 0.0003963310469623989, "loss": 4.2982, "step": 1812 }, { "epoch": 2.07, "grad_norm": 0.8834472439386788, "learning_rate": 0.00039612065808597614, "loss": 4.2209, "step": 1813 }, { "epoch": 2.07, "grad_norm": 1.9738929503949598, "learning_rate": 0.00039591021651443347, "loss": 4.2313, "step": 1814 }, { "epoch": 2.07, "grad_norm": 0.8016675042380728, "learning_rate": 0.00039569972236313866, "loss": 4.2145, "step": 1815 }, { "epoch": 2.08, "grad_norm": 1.3837154419679747, "learning_rate": 0.0003954891757474887, "loss": 4.4249, "step": 1816 }, { "epoch": 2.08, "grad_norm": 0.5619410145955279, "learning_rate": 0.000395278576782909, "loss": 4.2743, "step": 1817 }, { "epoch": 2.08, "grad_norm": 2.0269468865765687, "learning_rate": 0.000395067925584854, "loss": 4.4762, "step": 1818 }, { "epoch": 2.08, "grad_norm": 1.2245096716925368, "learning_rate": 0.0003948572222688063, "loss": 4.2368, "step": 1819 }, { "epoch": 2.08, "grad_norm": 1.1358501906530618, "learning_rate": 0.0003946464669502775, "loss": 4.205, "step": 1820 }, { "epoch": 2.08, "grad_norm": 0.5126446303714004, "learning_rate": 0.00039443565974480767, "loss": 4.2779, "step": 1821 }, { "epoch": 2.08, "grad_norm": 2.0523584443418934, "learning_rate": 0.00039422480076796494, "loss": 4.3968, "step": 1822 }, { "epoch": 2.08, "grad_norm": 1.645138812274915, "learning_rate": 0.0003940138901353463, "loss": 4.3991, "step": 1823 }, { "epoch": 2.08, "grad_norm": 0.6389423193212534, "learning_rate": 0.00039380292796257674, "loss": 4.4694, "step": 1824 }, { "epoch": 2.09, "grad_norm": 1.724346591718879, "learning_rate": 0.0003935919143653098, "loss": 4.3319, "step": 1825 }, { "epoch": 2.09, "grad_norm": 0.7907655729551181, "learning_rate": 0.00039338084945922684, "loss": 4.4086, "step": 1826 }, { "epoch": 2.09, "grad_norm": 1.030241278632822, "learning_rate": 0.0003931697333600376, "loss": 4.3546, "step": 1827 }, { "epoch": 2.09, "grad_norm": 0.8623956694180515, "learning_rate": 0.0003929585661834798, "loss": 4.3129, "step": 1828 }, { "epoch": 2.09, "grad_norm": 1.2165064977901894, "learning_rate": 0.0003927473480453193, "loss": 4.4959, "step": 1829 }, { "epoch": 2.09, "grad_norm": 1.0019290359651514, "learning_rate": 0.00039253607906134967, "loss": 4.4233, "step": 1830 }, { "epoch": 2.09, "grad_norm": 1.3072799717712242, "learning_rate": 0.0003923247593473925, "loss": 4.304, "step": 
1831 }, { "epoch": 2.09, "grad_norm": 2.0400281684940977, "learning_rate": 0.00039211338901929706, "loss": 4.4678, "step": 1832 }, { "epoch": 2.09, "grad_norm": 0.6092118713716793, "learning_rate": 0.00039190196819294075, "loss": 4.3624, "step": 1833 }, { "epoch": 2.1, "grad_norm": 1.5798191804952015, "learning_rate": 0.0003916904969842281, "loss": 4.4589, "step": 1834 }, { "epoch": 2.1, "grad_norm": 0.6506000561530033, "learning_rate": 0.0003914789755090916, "loss": 4.2735, "step": 1835 }, { "epoch": 2.1, "grad_norm": 2.1831595181097803, "learning_rate": 0.00039126740388349126, "loss": 4.4347, "step": 1836 }, { "epoch": 2.1, "grad_norm": 0.5289697997865335, "learning_rate": 0.00039105578222341453, "loss": 4.4403, "step": 1837 }, { "epoch": 2.1, "grad_norm": 2.0147280453085443, "learning_rate": 0.00039084411064487645, "loss": 4.4506, "step": 1838 }, { "epoch": 2.1, "grad_norm": 0.5598534355086349, "learning_rate": 0.000390632389263919, "loss": 4.3747, "step": 1839 }, { "epoch": 2.1, "grad_norm": 1.836513424692338, "learning_rate": 0.00039042061819661195, "loss": 4.2392, "step": 1840 }, { "epoch": 2.1, "grad_norm": 0.5892461694388671, "learning_rate": 0.0003902087975590521, "loss": 4.3193, "step": 1841 }, { "epoch": 2.11, "grad_norm": 1.2386409270869203, "learning_rate": 0.0003899969274673634, "loss": 4.2106, "step": 1842 }, { "epoch": 2.11, "grad_norm": 0.8003772093939621, "learning_rate": 0.000389785008037697, "loss": 4.1398, "step": 1843 }, { "epoch": 2.11, "grad_norm": 1.1164739756975015, "learning_rate": 0.000389573039386231, "loss": 4.2384, "step": 1844 }, { "epoch": 2.11, "grad_norm": 0.5637190388740277, "learning_rate": 0.00038936102162917053, "loss": 4.3483, "step": 1845 }, { "epoch": 2.11, "grad_norm": 0.8625411023635501, "learning_rate": 0.0003891489548827476, "loss": 4.1983, "step": 1846 }, { "epoch": 2.11, "grad_norm": 0.5319855441958788, "learning_rate": 0.0003889368392632212, "loss": 4.2883, "step": 1847 }, { "epoch": 2.11, "grad_norm": 0.6819321824673902, "learning_rate": 0.000388724674886877, "loss": 4.2565, "step": 1848 }, { "epoch": 2.11, "grad_norm": 0.569093534872474, "learning_rate": 0.0003885124618700274, "loss": 4.3668, "step": 1849 }, { "epoch": 2.11, "grad_norm": 0.7346841149769192, "learning_rate": 0.00038830020032901147, "loss": 4.3794, "step": 1850 }, { "epoch": 2.12, "grad_norm": 1.090468755953588, "learning_rate": 0.000388087890380195, "loss": 4.3192, "step": 1851 }, { "epoch": 2.12, "grad_norm": 0.6452335895493556, "learning_rate": 0.0003878755321399702, "loss": 4.2989, "step": 1852 }, { "epoch": 2.12, "grad_norm": 0.6382796458995724, "learning_rate": 0.00038766312572475575, "loss": 4.2366, "step": 1853 }, { "epoch": 2.12, "grad_norm": 0.6688957044362543, "learning_rate": 0.0003874506712509967, "loss": 4.3003, "step": 1854 }, { "epoch": 2.12, "grad_norm": 2.024630627076105, "learning_rate": 0.00038723816883516463, "loss": 4.2687, "step": 1855 }, { "epoch": 2.12, "grad_norm": 1.5303406699603224, "learning_rate": 0.0003870256185937573, "loss": 4.2146, "step": 1856 }, { "epoch": 2.12, "grad_norm": 1.046553373277083, "learning_rate": 0.0003868130206432986, "loss": 4.4741, "step": 1857 }, { "epoch": 2.12, "grad_norm": 1.5178050947537223, "learning_rate": 0.0003866003751003386, "loss": 4.3701, "step": 1858 }, { "epoch": 2.12, "grad_norm": 0.7577686674214523, "learning_rate": 0.00038638768208145374, "loss": 4.2662, "step": 1859 }, { "epoch": 2.13, "grad_norm": 1.3456568053127247, "learning_rate": 0.000386174941703246, "loss": 4.3344, "step": 1860 }, { "epoch": 2.13, 
"grad_norm": 0.847863919004829, "learning_rate": 0.0003859621540823438, "loss": 4.4171, "step": 1861 }, { "epoch": 2.13, "grad_norm": 1.117724752105106, "learning_rate": 0.000385749319335401, "loss": 4.2398, "step": 1862 }, { "epoch": 2.13, "grad_norm": 0.6255588774099449, "learning_rate": 0.00038553643757909786, "loss": 4.3058, "step": 1863 }, { "epoch": 2.13, "grad_norm": 0.8527020243574898, "learning_rate": 0.0003853235089301398, "loss": 4.2551, "step": 1864 }, { "epoch": 2.13, "grad_norm": 1.179348689486761, "learning_rate": 0.0003851105335052585, "loss": 4.4247, "step": 1865 }, { "epoch": 2.13, "grad_norm": 0.6950032175360856, "learning_rate": 0.00038489751142121065, "loss": 4.1867, "step": 1866 }, { "epoch": 2.13, "grad_norm": 1.3516421885910455, "learning_rate": 0.00038468444279477924, "loss": 4.2827, "step": 1867 }, { "epoch": 2.13, "grad_norm": 0.9861483577316386, "learning_rate": 0.00038447132774277227, "loss": 4.3759, "step": 1868 }, { "epoch": 2.14, "grad_norm": 0.6639698920242082, "learning_rate": 0.0003842581663820234, "loss": 4.3996, "step": 1869 }, { "epoch": 2.14, "grad_norm": 0.7467008123060245, "learning_rate": 0.00038404495882939153, "loss": 4.4333, "step": 1870 }, { "epoch": 2.14, "grad_norm": 1.0172595251619347, "learning_rate": 0.0003838317052017612, "loss": 4.3245, "step": 1871 }, { "epoch": 2.14, "grad_norm": 1.5695280618302556, "learning_rate": 0.0003836184056160417, "loss": 4.3191, "step": 1872 }, { "epoch": 2.14, "grad_norm": 0.5309567551676843, "learning_rate": 0.000383405060189168, "loss": 4.4582, "step": 1873 }, { "epoch": 2.14, "grad_norm": 1.2663917207564606, "learning_rate": 0.00038319166903809983, "loss": 4.5093, "step": 1874 }, { "epoch": 2.14, "grad_norm": 0.562629003857273, "learning_rate": 0.0003829782322798224, "loss": 4.4028, "step": 1875 }, { "epoch": 2.14, "grad_norm": 1.4049115481085637, "learning_rate": 0.0003827647500313456, "loss": 4.2516, "step": 1876 }, { "epoch": 2.15, "grad_norm": 0.46218488290562715, "learning_rate": 0.00038255122240970417, "loss": 4.3072, "step": 1877 }, { "epoch": 2.15, "grad_norm": 2.3873334639259354, "learning_rate": 0.000382337649531958, "loss": 4.4592, "step": 1878 }, { "epoch": 2.15, "grad_norm": 1.217042444620816, "learning_rate": 0.0003821240315151918, "loss": 4.2833, "step": 1879 }, { "epoch": 2.15, "grad_norm": 1.6582655093268786, "learning_rate": 0.0003819103684765148, "loss": 4.2842, "step": 1880 }, { "epoch": 2.15, "grad_norm": 1.3972401654050137, "learning_rate": 0.00038169666053306114, "loss": 4.1703, "step": 1881 }, { "epoch": 2.15, "grad_norm": 0.7467223298150372, "learning_rate": 0.0003814829078019892, "loss": 4.4403, "step": 1882 }, { "epoch": 2.15, "grad_norm": 1.0411155739528064, "learning_rate": 0.00038126911040048243, "loss": 4.2929, "step": 1883 }, { "epoch": 2.15, "grad_norm": 0.7678010807756125, "learning_rate": 0.0003810552684457484, "loss": 4.2307, "step": 1884 }, { "epoch": 2.15, "grad_norm": 1.4847062489648213, "learning_rate": 0.0003808413820550192, "loss": 4.179, "step": 1885 }, { "epoch": 2.16, "grad_norm": 1.1084009441128622, "learning_rate": 0.0003806274513455514, "loss": 4.3166, "step": 1886 }, { "epoch": 2.16, "grad_norm": 0.7340223164359629, "learning_rate": 0.00038041347643462577, "loss": 4.3272, "step": 1887 }, { "epoch": 2.16, "grad_norm": 0.8556201502140945, "learning_rate": 0.00038019945743954713, "loss": 4.3661, "step": 1888 }, { "epoch": 2.16, "grad_norm": 1.9375531938463024, "learning_rate": 0.0003799853944776447, "loss": 4.2137, "step": 1889 }, { "epoch": 2.16, "grad_norm": 
2.7251919281605783, "learning_rate": 0.0003797712876662719, "loss": 4.433, "step": 1890 }, { "epoch": 2.16, "grad_norm": 1.3056054653112923, "learning_rate": 0.00037955713712280604, "loss": 4.1496, "step": 1891 }, { "epoch": 2.16, "grad_norm": 1.916591949066931, "learning_rate": 0.0003793429429646483, "loss": 4.1207, "step": 1892 }, { "epoch": 2.16, "grad_norm": 2.288326928281593, "learning_rate": 0.00037912870530922374, "loss": 4.4123, "step": 1893 }, { "epoch": 2.16, "grad_norm": 1.5115048686310228, "learning_rate": 0.00037891442427398156, "loss": 4.5052, "step": 1894 }, { "epoch": 2.17, "grad_norm": 3.173971963082131, "learning_rate": 0.0003787000999763947, "loss": 4.4974, "step": 1895 }, { "epoch": 2.17, "grad_norm": 0.6209881081686413, "learning_rate": 0.0003784857325339595, "loss": 4.0736, "step": 1896 }, { "epoch": 2.17, "grad_norm": 2.6068529181484643, "learning_rate": 0.0003782713220641962, "loss": 4.5044, "step": 1897 }, { "epoch": 2.17, "grad_norm": 1.1037728457052378, "learning_rate": 0.0003780568686846486, "loss": 4.4919, "step": 1898 }, { "epoch": 2.17, "grad_norm": 1.37521295134448, "learning_rate": 0.00037784237251288407, "loss": 4.2654, "step": 1899 }, { "epoch": 2.17, "grad_norm": 1.4949886192277226, "learning_rate": 0.0003776278336664932, "loss": 4.2359, "step": 1900 }, { "epoch": 2.17, "eval_blimp_filtered_avg": 0.5161194029850746, "eval_blimp_filtered_std": 0.005657380882384819, "step": 1900 }, { "epoch": 2.17, "eval_blimp_supplement_avg": 0.5129310344827587, "eval_blimp_supplement_std": 0.02313771269549358, "step": 1900 }, { "epoch": 2.17, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 1900 }, { "epoch": 2.17, "eval_winoground_filtered_avg": 0.47, "eval_winoground_filtered_std": 0.05016135580465919, "step": 1900 }, { "epoch": 2.17, "grad_norm": 0.5820969312267368, "learning_rate": 0.00037741325226309025, "loss": 4.321, "step": 1901 }, { "epoch": 2.17, "grad_norm": 1.0901489784564493, "learning_rate": 0.0003771986284203128, "loss": 4.2728, "step": 1902 }, { "epoch": 2.17, "grad_norm": 0.7353463222580834, "learning_rate": 0.00037698396225582147, "loss": 4.5326, "step": 1903 }, { "epoch": 2.18, "grad_norm": 1.58336009380215, "learning_rate": 0.00037676925388730023, "loss": 4.4527, "step": 1904 }, { "epoch": 2.18, "grad_norm": 0.772569497686848, "learning_rate": 0.0003765545034324561, "loss": 4.2108, "step": 1905 }, { "epoch": 2.18, "grad_norm": 1.1086024571479756, "learning_rate": 0.00037633971100901946, "loss": 4.3602, "step": 1906 }, { "epoch": 2.18, "grad_norm": 0.7145970974797617, "learning_rate": 0.0003761248767347435, "loss": 4.1881, "step": 1907 }, { "epoch": 2.18, "grad_norm": 1.8056646561604979, "learning_rate": 0.000375910000727404, "loss": 4.2723, "step": 1908 }, { "epoch": 2.18, "grad_norm": 2.910407155458624, "learning_rate": 0.00037569508310480017, "loss": 4.2565, "step": 1909 }, { "epoch": 2.18, "grad_norm": 1.3524974809425307, "learning_rate": 0.0003754801239847539, "loss": 4.4411, "step": 1910 }, { "epoch": 2.18, "grad_norm": 1.915177277794558, "learning_rate": 0.0003752651234851096, "loss": 4.4339, "step": 1911 }, { "epoch": 2.19, "grad_norm": 1.6259194071439245, "learning_rate": 0.0003750500817237347, "loss": 4.258, "step": 1912 }, { "epoch": 2.19, "grad_norm": 2.0877094530218336, "learning_rate": 0.0003748349988185188, "loss": 4.4552, "step": 1913 }, { "epoch": 2.19, "grad_norm": 3.3150051661743203, "learning_rate": 0.0003746198748873745, "loss": 4.5314, "step": 1914 }, { "epoch": 2.19, "grad_norm": 
0.8936618888231032, "learning_rate": 0.0003744047100482367, "loss": 4.2391, "step": 1915 }, { "epoch": 2.19, "grad_norm": 2.8156435653029486, "learning_rate": 0.00037418950441906265, "loss": 4.3262, "step": 1916 }, { "epoch": 2.19, "grad_norm": 0.827652116930843, "learning_rate": 0.00037397425811783213, "loss": 4.1682, "step": 1917 }, { "epoch": 2.19, "grad_norm": 2.069305715030624, "learning_rate": 0.0003737589712625472, "loss": 4.2621, "step": 1918 }, { "epoch": 2.19, "grad_norm": 0.6345891548158528, "learning_rate": 0.00037354364397123197, "loss": 4.3089, "step": 1919 }, { "epoch": 2.19, "grad_norm": 2.1556614565244177, "learning_rate": 0.0003733282763619328, "loss": 4.4017, "step": 1920 }, { "epoch": 2.2, "grad_norm": 1.1481279313257724, "learning_rate": 0.00037311286855271834, "loss": 4.2642, "step": 1921 }, { "epoch": 2.2, "grad_norm": 1.6249412811920643, "learning_rate": 0.00037289742066167907, "loss": 4.301, "step": 1922 }, { "epoch": 2.2, "grad_norm": 0.8142456024049323, "learning_rate": 0.0003726819328069275, "loss": 4.4713, "step": 1923 }, { "epoch": 2.2, "grad_norm": 0.8963025194147003, "learning_rate": 0.00037246640510659796, "loss": 4.184, "step": 1924 }, { "epoch": 2.2, "grad_norm": 0.955004418327837, "learning_rate": 0.00037225083767884683, "loss": 4.326, "step": 1925 }, { "epoch": 2.2, "grad_norm": 1.1953897342261859, "learning_rate": 0.00037203523064185213, "loss": 4.2717, "step": 1926 }, { "epoch": 2.2, "grad_norm": 1.3617322372185952, "learning_rate": 0.0003718195841138137, "loss": 4.5012, "step": 1927 }, { "epoch": 2.2, "grad_norm": 0.746900102699365, "learning_rate": 0.00037160389821295277, "loss": 4.4317, "step": 1928 }, { "epoch": 2.2, "grad_norm": 0.8559967701882609, "learning_rate": 0.0003713881730575125, "loss": 4.3187, "step": 1929 }, { "epoch": 2.21, "grad_norm": 1.1399301083678148, "learning_rate": 0.00037117240876575715, "loss": 4.4031, "step": 1930 }, { "epoch": 2.21, "grad_norm": 0.8227259263083517, "learning_rate": 0.0003709566054559731, "loss": 4.273, "step": 1931 }, { "epoch": 2.21, "grad_norm": 0.5822931495425235, "learning_rate": 0.0003707407632464674, "loss": 4.2315, "step": 1932 }, { "epoch": 2.21, "grad_norm": 0.9058795621979164, "learning_rate": 0.00037052488225556874, "loss": 4.1084, "step": 1933 }, { "epoch": 2.21, "grad_norm": 1.373272974683091, "learning_rate": 0.0003703089626016272, "loss": 4.3785, "step": 1934 }, { "epoch": 2.21, "grad_norm": 0.7772662032031363, "learning_rate": 0.00037009300440301386, "loss": 4.2644, "step": 1935 }, { "epoch": 2.21, "grad_norm": 1.2646756676713584, "learning_rate": 0.00036987700777812093, "loss": 4.1889, "step": 1936 }, { "epoch": 2.21, "grad_norm": 0.6197336335870793, "learning_rate": 0.00036966097284536195, "loss": 4.2526, "step": 1937 }, { "epoch": 2.21, "grad_norm": 0.8004531813768027, "learning_rate": 0.00036944489972317104, "loss": 4.2407, "step": 1938 }, { "epoch": 2.22, "grad_norm": 0.5476796485163309, "learning_rate": 0.0003692287885300037, "loss": 4.1308, "step": 1939 }, { "epoch": 2.22, "grad_norm": 0.7083165043678129, "learning_rate": 0.00036901263938433573, "loss": 4.1768, "step": 1940 }, { "epoch": 2.22, "grad_norm": 0.7104264981580629, "learning_rate": 0.0003687964524046644, "loss": 4.2019, "step": 1941 }, { "epoch": 2.22, "grad_norm": 0.6861222249003641, "learning_rate": 0.00036858022770950725, "loss": 4.4594, "step": 1942 }, { "epoch": 2.22, "grad_norm": 1.5694346035106297, "learning_rate": 0.00036836396541740273, "loss": 4.429, "step": 1943 }, { "epoch": 2.22, "grad_norm": 
1.327718001828511, "learning_rate": 0.00036814766564690975, "loss": 4.2689, "step": 1944 }, { "epoch": 2.22, "grad_norm": 0.5119791783496837, "learning_rate": 0.00036793132851660796, "loss": 4.2603, "step": 1945 }, { "epoch": 2.22, "grad_norm": 0.8128235488407769, "learning_rate": 0.00036771495414509724, "loss": 4.1576, "step": 1946 }, { "epoch": 2.23, "grad_norm": 0.5913145594958984, "learning_rate": 0.0003674985426509981, "loss": 4.2332, "step": 1947 }, { "epoch": 2.23, "grad_norm": 1.5207294336822073, "learning_rate": 0.00036728209415295125, "loss": 4.1698, "step": 1948 }, { "epoch": 2.23, "grad_norm": 0.6458615980432652, "learning_rate": 0.00036706560876961786, "loss": 4.2807, "step": 1949 }, { "epoch": 2.23, "grad_norm": 1.6757926080366181, "learning_rate": 0.0003668490866196791, "loss": 4.3613, "step": 1950 }, { "epoch": 2.23, "grad_norm": 1.5708256359523067, "learning_rate": 0.0003666325278218364, "loss": 4.4347, "step": 1951 }, { "epoch": 2.23, "grad_norm": 1.249263629449511, "learning_rate": 0.00036641593249481144, "loss": 4.3893, "step": 1952 }, { "epoch": 2.23, "grad_norm": 1.9876521414430748, "learning_rate": 0.0003661993007573456, "loss": 4.2624, "step": 1953 }, { "epoch": 2.23, "grad_norm": 0.788202432230404, "learning_rate": 0.0003659826327282006, "loss": 4.2957, "step": 1954 }, { "epoch": 2.23, "grad_norm": 1.370327915307975, "learning_rate": 0.00036576592852615765, "loss": 4.4122, "step": 1955 }, { "epoch": 2.24, "grad_norm": 1.1757210412032089, "learning_rate": 0.0003655491882700181, "loss": 4.1165, "step": 1956 }, { "epoch": 2.24, "grad_norm": 0.8528593409148948, "learning_rate": 0.00036533241207860296, "loss": 4.1394, "step": 1957 }, { "epoch": 2.24, "grad_norm": 2.260786243595356, "learning_rate": 0.00036511560007075296, "loss": 4.3379, "step": 1958 }, { "epoch": 2.24, "grad_norm": 0.9210336585158017, "learning_rate": 0.00036489875236532835, "loss": 4.2524, "step": 1959 }, { "epoch": 2.24, "grad_norm": 2.5068193582258416, "learning_rate": 0.0003646818690812092, "loss": 4.2391, "step": 1960 }, { "epoch": 2.24, "grad_norm": 1.0753184504675808, "learning_rate": 0.00036446495033729475, "loss": 4.4959, "step": 1961 }, { "epoch": 2.24, "grad_norm": 1.3537620783234052, "learning_rate": 0.00036424799625250394, "loss": 4.1611, "step": 1962 }, { "epoch": 2.24, "grad_norm": 0.5568171824219594, "learning_rate": 0.00036403100694577513, "loss": 4.3755, "step": 1963 }, { "epoch": 2.24, "grad_norm": 1.4209395900691182, "learning_rate": 0.0003638139825360657, "loss": 4.4317, "step": 1964 }, { "epoch": 2.25, "grad_norm": 1.0481892760580325, "learning_rate": 0.00036359692314235254, "loss": 4.1986, "step": 1965 }, { "epoch": 2.25, "grad_norm": 2.6713210023040883, "learning_rate": 0.0003633798288836316, "loss": 4.302, "step": 1966 }, { "epoch": 2.25, "grad_norm": 0.986233318460866, "learning_rate": 0.00036316269987891783, "loss": 4.2121, "step": 1967 }, { "epoch": 2.25, "grad_norm": 0.9097523119314848, "learning_rate": 0.00036294553624724554, "loss": 4.173, "step": 1968 }, { "epoch": 2.25, "grad_norm": 0.8761538711462373, "learning_rate": 0.00036272833810766777, "loss": 4.0156, "step": 1969 }, { "epoch": 2.25, "grad_norm": 1.9389458630694194, "learning_rate": 0.00036251110557925653, "loss": 4.3884, "step": 1970 }, { "epoch": 2.25, "grad_norm": 3.5865913904359346, "learning_rate": 0.00036229383878110276, "loss": 4.5273, "step": 1971 }, { "epoch": 2.25, "grad_norm": 0.9757630038370871, "learning_rate": 0.00036207653783231603, "loss": 4.3175, "step": 1972 }, { "epoch": 2.25, "grad_norm": 
2.5048807949752656, "learning_rate": 0.0003618592028520249, "loss": 4.2563, "step": 1973 }, { "epoch": 2.26, "grad_norm": 1.5768652553963096, "learning_rate": 0.0003616418339593763, "loss": 4.2402, "step": 1974 }, { "epoch": 2.26, "grad_norm": 3.100278801880563, "learning_rate": 0.0003614244312735358, "loss": 4.3517, "step": 1975 }, { "epoch": 2.26, "grad_norm": 2.0491672597350368, "learning_rate": 0.00036120699491368777, "loss": 4.2304, "step": 1976 }, { "epoch": 2.26, "grad_norm": 4.024384941169139, "learning_rate": 0.00036098952499903473, "loss": 4.444, "step": 1977 }, { "epoch": 2.26, "grad_norm": 0.768458783703319, "learning_rate": 0.0003607720216487977, "loss": 4.5618, "step": 1978 }, { "epoch": 2.26, "grad_norm": 4.750316020921413, "learning_rate": 0.0003605544849822159, "loss": 4.3365, "step": 1979 }, { "epoch": 2.26, "grad_norm": 3.8992631162026465, "learning_rate": 0.00036033691511854724, "loss": 4.2631, "step": 1980 }, { "epoch": 2.26, "grad_norm": 1.4199882641157622, "learning_rate": 0.00036011931217706734, "loss": 4.333, "step": 1981 }, { "epoch": 2.27, "grad_norm": 2.1468013352365176, "learning_rate": 0.00035990167627707024, "loss": 4.4306, "step": 1982 }, { "epoch": 2.27, "grad_norm": 1.0580504763610508, "learning_rate": 0.00035968400753786777, "loss": 4.4036, "step": 1983 }, { "epoch": 2.27, "grad_norm": 1.9848472814343332, "learning_rate": 0.00035946630607879026, "loss": 4.2871, "step": 1984 }, { "epoch": 2.27, "grad_norm": 1.508705835087846, "learning_rate": 0.0003592485720191854, "loss": 4.2564, "step": 1985 }, { "epoch": 2.27, "grad_norm": 1.3557176853602833, "learning_rate": 0.00035903080547841924, "loss": 4.2564, "step": 1986 }, { "epoch": 2.27, "grad_norm": 1.2079872770352276, "learning_rate": 0.00035881300657587513, "loss": 4.2786, "step": 1987 }, { "epoch": 2.27, "grad_norm": 1.1348203234397871, "learning_rate": 0.0003585951754309548, "loss": 4.264, "step": 1988 }, { "epoch": 2.27, "grad_norm": 1.3662149553607998, "learning_rate": 0.000358377312163077, "loss": 4.2526, "step": 1989 }, { "epoch": 2.27, "grad_norm": 1.6185461414346178, "learning_rate": 0.00035815941689167866, "loss": 4.1711, "step": 1990 }, { "epoch": 2.28, "grad_norm": 2.2528369414675007, "learning_rate": 0.0003579414897362138, "loss": 4.329, "step": 1991 }, { "epoch": 2.28, "grad_norm": 1.538266406416692, "learning_rate": 0.00035772353081615416, "loss": 4.3181, "step": 1992 }, { "epoch": 2.28, "grad_norm": 2.844320560985542, "learning_rate": 0.00035750554025098895, "loss": 4.4685, "step": 1993 }, { "epoch": 2.28, "grad_norm": 1.1949789412577927, "learning_rate": 0.00035728751816022444, "loss": 4.3234, "step": 1994 }, { "epoch": 2.28, "grad_norm": 2.5230718564159114, "learning_rate": 0.0003570694646633845, "loss": 4.291, "step": 1995 }, { "epoch": 2.28, "grad_norm": 1.2895626638097109, "learning_rate": 0.0003568513798800101, "loss": 4.2317, "step": 1996 }, { "epoch": 2.28, "grad_norm": 1.2685763777791328, "learning_rate": 0.0003566332639296592, "loss": 4.3211, "step": 1997 }, { "epoch": 2.28, "grad_norm": 2.424884858269157, "learning_rate": 0.00035641511693190715, "loss": 4.2813, "step": 1998 }, { "epoch": 2.28, "grad_norm": 0.9950928632674216, "learning_rate": 0.000356196939006346, "loss": 4.3262, "step": 1999 }, { "epoch": 2.29, "grad_norm": 2.0404976954183174, "learning_rate": 0.00035597873027258515, "loss": 4.2821, "step": 2000 }, { "epoch": 2.29, "eval_blimp_filtered_avg": 0.5262686567164179, "eval_blimp_filtered_std": 0.005585973329074082, "step": 2000 }, { "epoch": 2.29, 
"eval_blimp_supplement_avg": 0.5732758620689655, "eval_blimp_supplement_std": 0.02289001564612328, "step": 2000 }, { "epoch": 2.29, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 2000 }, { "epoch": 2.29, "eval_winoground_filtered_avg": 0.5, "eval_winoground_filtered_std": 0.050251890762960605, "step": 2000 }, { "epoch": 2.29, "grad_norm": 5.945998840036045, "learning_rate": 0.00035576049085025047, "loss": 4.3944, "step": 2001 }, { "epoch": 2.29, "grad_norm": 3.325152607339408, "learning_rate": 0.0003555422208589849, "loss": 4.3731, "step": 2002 }, { "epoch": 2.29, "grad_norm": 1.7014394659757326, "learning_rate": 0.000355323920418448, "loss": 4.3447, "step": 2003 }, { "epoch": 2.29, "grad_norm": 2.3452969000796395, "learning_rate": 0.0003551055896483162, "loss": 4.4419, "step": 2004 }, { "epoch": 2.29, "grad_norm": 2.878275280046811, "learning_rate": 0.0003548872286682826, "loss": 4.4025, "step": 2005 }, { "epoch": 2.29, "grad_norm": 2.44421632041388, "learning_rate": 0.00035466883759805644, "loss": 4.4146, "step": 2006 }, { "epoch": 2.29, "grad_norm": 0.8710743348412628, "learning_rate": 0.0003544504165573638, "loss": 4.454, "step": 2007 }, { "epoch": 2.29, "grad_norm": 2.1615288451290926, "learning_rate": 0.00035423196566594726, "loss": 4.3533, "step": 2008 }, { "epoch": 2.3, "grad_norm": 2.1253801064628823, "learning_rate": 0.0003540134850435656, "loss": 4.4062, "step": 2009 }, { "epoch": 2.3, "grad_norm": 0.9004705450455097, "learning_rate": 0.0003537949748099938, "loss": 4.4734, "step": 2010 }, { "epoch": 2.3, "grad_norm": 1.3047356638265686, "learning_rate": 0.00035357643508502343, "loss": 4.5541, "step": 2011 }, { "epoch": 2.3, "grad_norm": 1.8401669844041402, "learning_rate": 0.00035335786598846177, "loss": 4.3171, "step": 2012 }, { "epoch": 2.3, "grad_norm": 1.7368863093662565, "learning_rate": 0.00035313926764013264, "loss": 4.2369, "step": 2013 }, { "epoch": 2.3, "grad_norm": 1.147659838817981, "learning_rate": 0.00035292064015987536, "loss": 4.2651, "step": 2014 }, { "epoch": 2.3, "grad_norm": 0.9213533850711806, "learning_rate": 0.000352701983667546, "loss": 4.5075, "step": 2015 }, { "epoch": 2.3, "grad_norm": 1.1374542468623616, "learning_rate": 0.0003524832982830158, "loss": 4.2059, "step": 2016 }, { "epoch": 2.31, "grad_norm": 0.5738557515061314, "learning_rate": 0.0003522645841261722, "loss": 4.3067, "step": 2017 }, { "epoch": 2.31, "grad_norm": 1.2020113557378003, "learning_rate": 0.0003520458413169183, "loss": 4.2509, "step": 2018 }, { "epoch": 2.31, "grad_norm": 0.9203762210171189, "learning_rate": 0.00035182706997517304, "loss": 4.5718, "step": 2019 }, { "epoch": 2.31, "grad_norm": 0.8921145514980305, "learning_rate": 0.0003516082702208709, "loss": 4.3716, "step": 2020 }, { "epoch": 2.31, "grad_norm": 0.6362117934549254, "learning_rate": 0.0003513894421739618, "loss": 4.2583, "step": 2021 }, { "epoch": 2.31, "grad_norm": 1.055670495154233, "learning_rate": 0.00035117058595441144, "loss": 4.1751, "step": 2022 }, { "epoch": 2.31, "grad_norm": 0.6940826143919365, "learning_rate": 0.00035095170168220085, "loss": 4.4344, "step": 2023 }, { "epoch": 2.31, "grad_norm": 1.1923763045152749, "learning_rate": 0.00035073278947732637, "loss": 4.5912, "step": 2024 }, { "epoch": 2.31, "grad_norm": 1.1321928326535502, "learning_rate": 0.0003505138494597998, "loss": 4.4396, "step": 2025 }, { "epoch": 2.32, "grad_norm": 1.3559095209723704, "learning_rate": 0.0003502948817496479, "loss": 4.1998, "step": 2026 }, { "epoch": 2.32, "grad_norm": 
1.3546591263336438, "learning_rate": 0.0003500758864669131, "loss": 4.302, "step": 2027 }, { "epoch": 2.32, "grad_norm": 1.5204312070809398, "learning_rate": 0.0003498568637316525, "loss": 4.1786, "step": 2028 }, { "epoch": 2.32, "grad_norm": 0.9016302206450255, "learning_rate": 0.0003496378136639384, "loss": 4.3215, "step": 2029 }, { "epoch": 2.32, "grad_norm": 3.0267062261501403, "learning_rate": 0.0003494187363838581, "loss": 4.524, "step": 2030 }, { "epoch": 2.32, "grad_norm": 1.2371216524586623, "learning_rate": 0.0003491996320115138, "loss": 4.3414, "step": 2031 }, { "epoch": 2.32, "grad_norm": 3.237086108649337, "learning_rate": 0.00034898050066702263, "loss": 4.5709, "step": 2032 }, { "epoch": 2.32, "grad_norm": 0.935468666445416, "learning_rate": 0.0003487613424705164, "loss": 4.3614, "step": 2033 }, { "epoch": 2.32, "grad_norm": 2.5961412957220067, "learning_rate": 0.00034854215754214163, "loss": 4.2368, "step": 2034 }, { "epoch": 2.33, "grad_norm": 0.888503913086469, "learning_rate": 0.0003483229460020597, "loss": 4.3581, "step": 2035 }, { "epoch": 2.33, "grad_norm": 1.7336219948015488, "learning_rate": 0.0003481037079704462, "loss": 4.3256, "step": 2036 }, { "epoch": 2.33, "grad_norm": 0.7357181597401315, "learning_rate": 0.0003478844435674917, "loss": 4.5346, "step": 2037 }, { "epoch": 2.33, "grad_norm": 2.3057051491823373, "learning_rate": 0.00034766515291340073, "loss": 4.2294, "step": 2038 }, { "epoch": 2.33, "grad_norm": 1.221037250606619, "learning_rate": 0.0003474458361283928, "loss": 4.3276, "step": 2039 }, { "epoch": 2.33, "grad_norm": 1.4980208600000442, "learning_rate": 0.00034722649333270125, "loss": 4.099, "step": 2040 }, { "epoch": 2.33, "grad_norm": 0.6782516188174156, "learning_rate": 0.00034700712464657375, "loss": 4.3065, "step": 2041 }, { "epoch": 2.33, "grad_norm": 2.647976231915306, "learning_rate": 0.00034678773019027244, "loss": 4.4367, "step": 2042 }, { "epoch": 2.33, "grad_norm": 1.1384975327780065, "learning_rate": 0.00034656831008407333, "loss": 4.3238, "step": 2043 }, { "epoch": 2.34, "grad_norm": 0.6587566614298829, "learning_rate": 0.00034634886444826656, "loss": 4.2638, "step": 2044 }, { "epoch": 2.34, "grad_norm": 1.0483363718211718, "learning_rate": 0.00034612939340315633, "loss": 4.206, "step": 2045 }, { "epoch": 2.34, "grad_norm": 1.8146452213085433, "learning_rate": 0.0003459098970690607, "loss": 4.2934, "step": 2046 }, { "epoch": 2.34, "grad_norm": 1.4575552758300554, "learning_rate": 0.00034569037556631167, "loss": 4.4479, "step": 2047 }, { "epoch": 2.34, "grad_norm": 0.6097205146382844, "learning_rate": 0.0003454708290152548, "loss": 4.4297, "step": 2048 }, { "epoch": 2.34, "grad_norm": 2.2416012832580923, "learning_rate": 0.0003452512575362498, "loss": 4.4094, "step": 2049 }, { "epoch": 2.34, "grad_norm": 0.6247764502966586, "learning_rate": 0.0003450316612496696, "loss": 4.4184, "step": 2050 }, { "epoch": 2.34, "grad_norm": 1.9558155045077503, "learning_rate": 0.00034481204027590105, "loss": 4.4383, "step": 2051 }, { "epoch": 2.35, "grad_norm": 1.817541657081176, "learning_rate": 0.00034459239473534446, "loss": 4.1508, "step": 2052 }, { "epoch": 2.35, "grad_norm": 1.5822298759451072, "learning_rate": 0.0003443727247484134, "loss": 4.5307, "step": 2053 }, { "epoch": 2.35, "grad_norm": 1.6360847704370725, "learning_rate": 0.00034415303043553537, "loss": 4.2026, "step": 2054 }, { "epoch": 2.35, "grad_norm": 0.5822928185260654, "learning_rate": 0.00034393331191715046, "loss": 4.2437, "step": 2055 }, { "epoch": 2.35, "grad_norm": 
2.012139310296186, "learning_rate": 0.0003437135693137127, "loss": 4.4965, "step": 2056 }, { "epoch": 2.35, "grad_norm": 2.013341996856864, "learning_rate": 0.0003434938027456887, "loss": 4.3383, "step": 2057 }, { "epoch": 2.35, "grad_norm": 1.877136738561602, "learning_rate": 0.000343274012333559, "loss": 4.266, "step": 2058 }, { "epoch": 2.35, "grad_norm": 2.8404633545866598, "learning_rate": 0.0003430541981978164, "loss": 4.384, "step": 2059 }, { "epoch": 2.35, "grad_norm": 2.1630072917249445, "learning_rate": 0.0003428343604589673, "loss": 4.456, "step": 2060 }, { "epoch": 2.36, "grad_norm": 2.2859663350041606, "learning_rate": 0.00034261449923753053, "loss": 4.692, "step": 2061 }, { "epoch": 2.36, "grad_norm": 3.616893376145364, "learning_rate": 0.0003423946146540385, "loss": 4.5318, "step": 2062 }, { "epoch": 2.36, "grad_norm": 1.4606122379041813, "learning_rate": 0.00034217470682903556, "loss": 4.3132, "step": 2063 }, { "epoch": 2.36, "grad_norm": 2.506174124411359, "learning_rate": 0.0003419547758830796, "loss": 4.489, "step": 2064 }, { "epoch": 2.36, "grad_norm": 1.8098339563369505, "learning_rate": 0.00034173482193674046, "loss": 4.4841, "step": 2065 }, { "epoch": 2.36, "grad_norm": 1.592640341951285, "learning_rate": 0.0003415148451106013, "loss": 4.2266, "step": 2066 }, { "epoch": 2.36, "grad_norm": 0.9374334977527548, "learning_rate": 0.0003412948455252573, "loss": 4.322, "step": 2067 }, { "epoch": 2.36, "grad_norm": 2.022863282643399, "learning_rate": 0.0003410748233013163, "loss": 4.3931, "step": 2068 }, { "epoch": 2.36, "grad_norm": 1.826713062280081, "learning_rate": 0.0003408547785593986, "loss": 4.275, "step": 2069 }, { "epoch": 2.37, "grad_norm": 3.2849605069189574, "learning_rate": 0.0003406347114201368, "loss": 4.3879, "step": 2070 }, { "epoch": 2.37, "grad_norm": 1.8779037976742416, "learning_rate": 0.0003404146220041758, "loss": 4.3096, "step": 2071 }, { "epoch": 2.37, "grad_norm": 1.8216772367562108, "learning_rate": 0.0003401945104321725, "loss": 4.1529, "step": 2072 }, { "epoch": 2.37, "grad_norm": 1.3522536622379977, "learning_rate": 0.0003399743768247963, "loss": 4.237, "step": 2073 }, { "epoch": 2.37, "grad_norm": 2.061632019276528, "learning_rate": 0.0003397542213027284, "loss": 4.399, "step": 2074 }, { "epoch": 2.37, "grad_norm": 0.7650810140939595, "learning_rate": 0.0003395340439866623, "loss": 4.3029, "step": 2075 }, { "epoch": 2.37, "grad_norm": 1.9884145636490744, "learning_rate": 0.000339313844997303, "loss": 4.2263, "step": 2076 }, { "epoch": 2.37, "grad_norm": 0.9462477831585521, "learning_rate": 0.00033909362445536795, "loss": 4.2402, "step": 2077 }, { "epoch": 2.37, "grad_norm": 1.3889386826701697, "learning_rate": 0.0003388733824815859, "loss": 4.2577, "step": 2078 }, { "epoch": 2.38, "grad_norm": 1.1715712716663387, "learning_rate": 0.0003386531191966977, "loss": 4.426, "step": 2079 }, { "epoch": 2.38, "grad_norm": 1.2911118654801799, "learning_rate": 0.0003384328347214557, "loss": 4.2857, "step": 2080 }, { "epoch": 2.38, "grad_norm": 0.688596967688997, "learning_rate": 0.0003382125291766238, "loss": 4.3336, "step": 2081 }, { "epoch": 2.38, "grad_norm": 1.7028085704812395, "learning_rate": 0.00033799220268297773, "loss": 4.1727, "step": 2082 }, { "epoch": 2.38, "grad_norm": 1.5606640292027352, "learning_rate": 0.0003377718553613046, "loss": 4.5283, "step": 2083 }, { "epoch": 2.38, "grad_norm": 1.5995840061181728, "learning_rate": 0.00033755148733240263, "loss": 4.4106, "step": 2084 }, { "epoch": 2.38, "grad_norm": 1.072740181271018, 
"learning_rate": 0.0003373310987170818, "loss": 4.4641, "step": 2085 }, { "epoch": 2.38, "grad_norm": 0.9747347984798919, "learning_rate": 0.0003371106896361633, "loss": 4.2459, "step": 2086 }, { "epoch": 2.39, "grad_norm": 1.264381319036501, "learning_rate": 0.00033689026021047926, "loss": 4.345, "step": 2087 }, { "epoch": 2.39, "grad_norm": 1.1683204462778978, "learning_rate": 0.00033666981056087336, "loss": 4.2796, "step": 2088 }, { "epoch": 2.39, "grad_norm": 1.1412289658585866, "learning_rate": 0.0003364493408082, "loss": 4.3462, "step": 2089 }, { "epoch": 2.39, "grad_norm": 1.9921328967977208, "learning_rate": 0.0003362288510733249, "loss": 4.1843, "step": 2090 }, { "epoch": 2.39, "grad_norm": 2.6071874418461483, "learning_rate": 0.0003360083414771245, "loss": 4.4001, "step": 2091 }, { "epoch": 2.39, "grad_norm": 1.3315871535438608, "learning_rate": 0.00033578781214048636, "loss": 4.1977, "step": 2092 }, { "epoch": 2.39, "grad_norm": 2.3286616698695477, "learning_rate": 0.0003355672631843086, "loss": 4.2894, "step": 2093 }, { "epoch": 2.39, "grad_norm": 0.5377001062467163, "learning_rate": 0.0003353466947295003, "loss": 4.0858, "step": 2094 }, { "epoch": 2.39, "grad_norm": 2.474559361089454, "learning_rate": 0.00033512610689698117, "loss": 4.3401, "step": 2095 }, { "epoch": 2.4, "grad_norm": 0.8175377242376495, "learning_rate": 0.00033490549980768144, "loss": 4.1963, "step": 2096 }, { "epoch": 2.4, "grad_norm": 2.951475067059975, "learning_rate": 0.00033468487358254216, "loss": 4.3098, "step": 2097 }, { "epoch": 2.4, "grad_norm": 1.218287126185963, "learning_rate": 0.0003344642283425146, "loss": 4.2653, "step": 2098 }, { "epoch": 2.4, "grad_norm": 5.888154726808916, "learning_rate": 0.0003342435642085605, "loss": 4.5174, "step": 2099 }, { "epoch": 2.4, "grad_norm": 5.0807510741464315, "learning_rate": 0.0003340228813016521, "loss": 4.4391, "step": 2100 }, { "epoch": 2.4, "eval_blimp_filtered_avg": 0.5288059701492537, "eval_blimp_filtered_std": 0.005531426344079286, "step": 2100 }, { "epoch": 2.4, "eval_blimp_supplement_avg": 0.5086206896551724, "eval_blimp_supplement_std": 0.022537039417513526, "step": 2100 }, { "epoch": 2.4, "eval_vqa_filtered_avg": 0.36, "eval_vqa_filtered_std": 0.04824181513244218, "step": 2100 }, { "epoch": 2.4, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 2100 }, { "epoch": 2.4, "grad_norm": 4.4702287633316455, "learning_rate": 0.0003338021797427718, "loss": 4.319, "step": 2101 }, { "epoch": 2.4, "grad_norm": 6.132327663173627, "learning_rate": 0.0003335814596529124, "loss": 4.4507, "step": 2102 }, { "epoch": 2.4, "grad_norm": 2.639737031001395, "learning_rate": 0.0003333607211530765, "loss": 4.4528, "step": 2103 }, { "epoch": 2.4, "grad_norm": 2.869568203535451, "learning_rate": 0.00033313996436427706, "loss": 4.3915, "step": 2104 }, { "epoch": 2.41, "grad_norm": 3.5555764617915218, "learning_rate": 0.00033291918940753733, "loss": 4.5243, "step": 2105 }, { "epoch": 2.41, "grad_norm": 0.6478901907461402, "learning_rate": 0.0003326983964038899, "loss": 4.3477, "step": 2106 }, { "epoch": 2.41, "grad_norm": 4.509279748641807, "learning_rate": 0.00033247758547437766, "loss": 4.3939, "step": 2107 }, { "epoch": 2.41, "grad_norm": 0.8128713245805477, "learning_rate": 0.00033225675674005323, "loss": 4.1904, "step": 2108 }, { "epoch": 2.41, "grad_norm": 3.6886622153993964, "learning_rate": 0.00033203591032197915, "loss": 4.3686, "step": 2109 }, { "epoch": 2.41, "grad_norm": 1.5393696559696803, "learning_rate": 
0.0003318150463412272, "loss": 4.4038, "step": 2110 }, { "epoch": 2.41, "grad_norm": 1.0441385921930375, "learning_rate": 0.0003315941649188794, "loss": 4.39, "step": 2111 }, { "epoch": 2.41, "grad_norm": 3.650374379334776, "learning_rate": 0.00033137326617602684, "loss": 4.4701, "step": 2112 }, { "epoch": 2.41, "grad_norm": 0.7872597783197107, "learning_rate": 0.0003311523502337704, "loss": 4.1471, "step": 2113 }, { "epoch": 2.42, "grad_norm": 3.1487838100333367, "learning_rate": 0.0003309314172132202, "loss": 4.3344, "step": 2114 }, { "epoch": 2.42, "grad_norm": 2.3063435308502935, "learning_rate": 0.0003307104672354958, "loss": 4.456, "step": 2115 }, { "epoch": 2.42, "grad_norm": 1.1161791010514834, "learning_rate": 0.0003304895004217261, "loss": 4.2653, "step": 2116 }, { "epoch": 2.42, "grad_norm": 2.4142665959262977, "learning_rate": 0.0003302685168930492, "loss": 4.457, "step": 2117 }, { "epoch": 2.42, "grad_norm": 0.6967440730606428, "learning_rate": 0.0003300475167706123, "loss": 4.0471, "step": 2118 }, { "epoch": 2.42, "grad_norm": 2.312932638723793, "learning_rate": 0.0003298265001755718, "loss": 4.3997, "step": 2119 }, { "epoch": 2.42, "grad_norm": 2.7607761055910163, "learning_rate": 0.00032960546722909294, "loss": 4.2669, "step": 2120 }, { "epoch": 2.42, "grad_norm": 8.274198737154466, "learning_rate": 0.0003293844180523503, "loss": 5.108, "step": 2121 }, { "epoch": 2.43, "grad_norm": 5.655252625282623, "learning_rate": 0.00032916335276652703, "loss": 4.664, "step": 2122 }, { "epoch": 2.43, "grad_norm": 3.5245707123847945, "learning_rate": 0.0003289422714928151, "loss": 4.7528, "step": 2123 }, { "epoch": 2.43, "grad_norm": 4.331277846511171, "learning_rate": 0.00032872117435241545, "loss": 4.6241, "step": 2124 }, { "epoch": 2.43, "grad_norm": 2.890652502540131, "learning_rate": 0.0003285000614665375, "loss": 4.9838, "step": 2125 }, { "epoch": 2.43, "grad_norm": 1.6956966277234777, "learning_rate": 0.0003282789329563996, "loss": 4.7498, "step": 2126 }, { "epoch": 2.43, "grad_norm": 1.1082287484685287, "learning_rate": 0.0003280577889432284, "loss": 4.596, "step": 2127 }, { "epoch": 2.43, "grad_norm": 1.3020297373927001, "learning_rate": 0.0003278366295482591, "loss": 4.7916, "step": 2128 }, { "epoch": 2.43, "grad_norm": 1.7851203989761089, "learning_rate": 0.0003276154548927353, "loss": 4.7432, "step": 2129 }, { "epoch": 2.43, "grad_norm": 1.1385184540832236, "learning_rate": 0.0003273942650979092, "loss": 4.6066, "step": 2130 }, { "epoch": 2.44, "grad_norm": 1.373740175633821, "learning_rate": 0.00032717306028504095, "loss": 4.8065, "step": 2131 }, { "epoch": 2.44, "grad_norm": 1.9101432415882775, "learning_rate": 0.0003269518405753993, "loss": 4.6091, "step": 2132 }, { "epoch": 2.44, "grad_norm": 0.8910493646524733, "learning_rate": 0.0003267306060902609, "loss": 4.6262, "step": 2133 }, { "epoch": 2.44, "grad_norm": 1.5586138721525074, "learning_rate": 0.00032650935695091056, "loss": 4.5887, "step": 2134 }, { "epoch": 2.44, "grad_norm": 2.1091828102231855, "learning_rate": 0.000326288093278641, "loss": 4.5476, "step": 2135 }, { "epoch": 2.44, "grad_norm": 1.7272225760692963, "learning_rate": 0.00032606681519475335, "loss": 4.6667, "step": 2136 }, { "epoch": 2.44, "grad_norm": 3.366410812641328, "learning_rate": 0.0003258455228205562, "loss": 4.7115, "step": 2137 }, { "epoch": 2.44, "grad_norm": 1.7996883862360058, "learning_rate": 0.00032562421627736613, "loss": 4.4369, "step": 2138 }, { "epoch": 2.44, "grad_norm": 1.5531571195220526, "learning_rate": 0.0003254028956865073, 
"loss": 4.3716, "step": 2139 }, { "epoch": 2.45, "grad_norm": 2.3183097340067484, "learning_rate": 0.00032518156116931204, "loss": 4.5631, "step": 2140 }, { "epoch": 2.45, "grad_norm": 2.26479498633631, "learning_rate": 0.00032496021284711987, "loss": 4.5874, "step": 2141 }, { "epoch": 2.45, "grad_norm": 1.0625356774885313, "learning_rate": 0.000324738850841278, "loss": 4.5055, "step": 2142 }, { "epoch": 2.45, "grad_norm": 1.1089949405040678, "learning_rate": 0.00032451747527314123, "loss": 4.56, "step": 2143 }, { "epoch": 2.45, "grad_norm": 2.953285581630864, "learning_rate": 0.0003242960862640717, "loss": 4.5574, "step": 2144 }, { "epoch": 2.45, "grad_norm": 1.1041663084556672, "learning_rate": 0.00032407468393543903, "loss": 4.5456, "step": 2145 }, { "epoch": 2.45, "grad_norm": 2.42304304205384, "learning_rate": 0.00032385326840861996, "loss": 4.5343, "step": 2146 }, { "epoch": 2.45, "grad_norm": 2.470983964809429, "learning_rate": 0.00032363183980499866, "loss": 4.5649, "step": 2147 }, { "epoch": 2.45, "grad_norm": 1.1466249138533289, "learning_rate": 0.00032341039824596626, "loss": 4.5266, "step": 2148 }, { "epoch": 2.46, "grad_norm": 2.889864992126179, "learning_rate": 0.0003231889438529213, "loss": 4.4206, "step": 2149 }, { "epoch": 2.46, "grad_norm": 2.032203993825711, "learning_rate": 0.00032296747674726895, "loss": 4.2761, "step": 2150 }, { "epoch": 2.46, "grad_norm": 0.7774473849767566, "learning_rate": 0.00032274599705042165, "loss": 4.2463, "step": 2151 }, { "epoch": 2.46, "grad_norm": 2.2466082496018474, "learning_rate": 0.00032252450488379875, "loss": 4.4972, "step": 2152 }, { "epoch": 2.46, "grad_norm": 1.6969391965348628, "learning_rate": 0.0003223030003688263, "loss": 4.5117, "step": 2153 }, { "epoch": 2.46, "grad_norm": 1.2238753623515313, "learning_rate": 0.0003220814836269371, "loss": 4.5193, "step": 2154 }, { "epoch": 2.46, "grad_norm": 2.5551645975756583, "learning_rate": 0.00032185995477957075, "loss": 4.4527, "step": 2155 }, { "epoch": 2.46, "grad_norm": 1.0916470991071967, "learning_rate": 0.0003216384139481735, "loss": 4.4665, "step": 2156 }, { "epoch": 2.47, "grad_norm": 0.9132242940534141, "learning_rate": 0.0003214168612541981, "loss": 4.3322, "step": 2157 }, { "epoch": 2.47, "grad_norm": 1.8995121921835332, "learning_rate": 0.0003211952968191038, "loss": 4.3415, "step": 2158 }, { "epoch": 2.47, "grad_norm": 1.0949373159669995, "learning_rate": 0.00032097372076435635, "loss": 4.1976, "step": 2159 }, { "epoch": 2.47, "grad_norm": 0.8777176642543518, "learning_rate": 0.00032075213321142783, "loss": 4.4238, "step": 2160 }, { "epoch": 2.47, "grad_norm": 2.2927080568614344, "learning_rate": 0.00032053053428179667, "loss": 4.6023, "step": 2161 }, { "epoch": 2.47, "grad_norm": 1.0130556060030205, "learning_rate": 0.00032030892409694734, "loss": 4.2252, "step": 2162 }, { "epoch": 2.47, "grad_norm": 0.8784144258398178, "learning_rate": 0.00032008730277837083, "loss": 4.5588, "step": 2163 }, { "epoch": 2.47, "grad_norm": 1.8822153552017953, "learning_rate": 0.00031986567044756396, "loss": 4.4684, "step": 2164 }, { "epoch": 2.47, "grad_norm": 0.7763324998164362, "learning_rate": 0.00031964402722602965, "loss": 4.3765, "step": 2165 }, { "epoch": 2.48, "grad_norm": 0.6757496395307732, "learning_rate": 0.0003194223732352768, "loss": 4.1134, "step": 2166 }, { "epoch": 2.48, "grad_norm": 1.7161414040768455, "learning_rate": 0.0003192007085968202, "loss": 4.3379, "step": 2167 }, { "epoch": 2.48, "grad_norm": 0.9792547502785244, "learning_rate": 0.0003189790334321807, "loss": 
4.3109, "step": 2168 }, { "epoch": 2.48, "grad_norm": 0.6235680072004474, "learning_rate": 0.00031875734786288436, "loss": 4.3398, "step": 2169 }, { "epoch": 2.48, "grad_norm": 1.1369774628459963, "learning_rate": 0.00031853565201046363, "loss": 4.4931, "step": 2170 }, { "epoch": 2.48, "grad_norm": 1.0451450847769115, "learning_rate": 0.00031831394599645606, "loss": 4.4587, "step": 2171 }, { "epoch": 2.48, "grad_norm": 1.1372854242545163, "learning_rate": 0.0003180922299424052, "loss": 4.4317, "step": 2172 }, { "epoch": 2.48, "grad_norm": 0.7165175615987772, "learning_rate": 0.00031787050396985965, "loss": 4.4702, "step": 2173 }, { "epoch": 2.48, "grad_norm": 0.7593508164417825, "learning_rate": 0.0003176487682003737, "loss": 4.315, "step": 2174 }, { "epoch": 2.49, "grad_norm": 0.7147793222473221, "learning_rate": 0.00031742702275550723, "loss": 4.475, "step": 2175 }, { "epoch": 2.49, "grad_norm": 0.8087601859267176, "learning_rate": 0.00031720526775682494, "loss": 4.1864, "step": 2176 }, { "epoch": 2.49, "grad_norm": 2.359881403152142, "learning_rate": 0.0003169835033258972, "loss": 4.365, "step": 2177 }, { "epoch": 2.49, "grad_norm": 1.5854663866430123, "learning_rate": 0.00031676172958429916, "loss": 4.2412, "step": 2178 }, { "epoch": 2.49, "grad_norm": 1.9417179104411515, "learning_rate": 0.0003165399466536115, "loss": 4.3947, "step": 2179 }, { "epoch": 2.49, "grad_norm": 1.96477900716057, "learning_rate": 0.00031631815465541955, "loss": 4.4546, "step": 2180 }, { "epoch": 2.49, "grad_norm": 0.6963270614724165, "learning_rate": 0.00031609635371131396, "loss": 4.4116, "step": 2181 }, { "epoch": 2.49, "grad_norm": 1.290415040944062, "learning_rate": 0.00031587454394288987, "loss": 4.4453, "step": 2182 }, { "epoch": 2.49, "grad_norm": 1.5534969938751992, "learning_rate": 0.00031565272547174765, "loss": 4.2669, "step": 2183 }, { "epoch": 2.5, "grad_norm": 1.259650263339209, "learning_rate": 0.0003154308984194922, "loss": 4.3684, "step": 2184 }, { "epoch": 2.5, "grad_norm": 1.156932969795427, "learning_rate": 0.00031520906290773333, "loss": 4.4865, "step": 2185 }, { "epoch": 2.5, "grad_norm": 1.9591340804816504, "learning_rate": 0.00031498721905808523, "loss": 4.4401, "step": 2186 }, { "epoch": 2.5, "grad_norm": 1.0239763053563462, "learning_rate": 0.0003147653669921669, "loss": 4.3722, "step": 2187 }, { "epoch": 2.5, "grad_norm": 0.8913263511878871, "learning_rate": 0.00031454350683160167, "loss": 4.3291, "step": 2188 }, { "epoch": 2.5, "grad_norm": 3.072686650128036, "learning_rate": 0.0003143216386980174, "loss": 4.6129, "step": 2189 }, { "epoch": 2.5, "grad_norm": 3.5280147900542698, "learning_rate": 0.0003140997627130463, "loss": 4.3233, "step": 2190 }, { "epoch": 2.5, "grad_norm": 1.3157124403149179, "learning_rate": 0.00031387787899832487, "loss": 4.2192, "step": 2191 }, { "epoch": 2.51, "grad_norm": 1.8332603987957956, "learning_rate": 0.0003136559876754939, "loss": 4.3905, "step": 2192 }, { "epoch": 2.51, "grad_norm": 4.940803043047535, "learning_rate": 0.0003134340888661983, "loss": 4.3358, "step": 2193 }, { "epoch": 2.51, "grad_norm": 1.2604112487708798, "learning_rate": 0.00031321218269208693, "loss": 4.1901, "step": 2194 }, { "epoch": 2.51, "grad_norm": 5.321321063440286, "learning_rate": 0.00031299026927481314, "loss": 4.43, "step": 2195 }, { "epoch": 2.51, "grad_norm": 0.6846007339144923, "learning_rate": 0.0003127683487360338, "loss": 4.2143, "step": 2196 }, { "epoch": 2.51, "grad_norm": 2.5946532702718637, "learning_rate": 0.0003125464211974098, "loss": 4.2905, "step": 2197 
}, { "epoch": 2.51, "grad_norm": 3.4543648926416655, "learning_rate": 0.00031232448678060606, "loss": 4.3033, "step": 2198 }, { "epoch": 2.51, "grad_norm": 0.5003184335551039, "learning_rate": 0.000312102545607291, "loss": 4.3346, "step": 2199 }, { "epoch": 2.51, "grad_norm": 1.6895045016514099, "learning_rate": 0.00031188059779913707, "loss": 4.2035, "step": 2200 }, { "epoch": 2.51, "eval_blimp_filtered_avg": 0.5288059701492537, "eval_blimp_filtered_std": 0.005435360900795471, "step": 2200 }, { "epoch": 2.51, "eval_blimp_supplement_avg": 0.5172413793103449, "eval_blimp_supplement_std": 0.022874433495946925, "step": 2200 }, { "epoch": 2.51, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 2200 }, { "epoch": 2.51, "eval_winoground_filtered_avg": 0.49, "eval_winoground_filtered_std": 0.05024183937956912, "step": 2200 }, { "epoch": 2.52, "grad_norm": 2.3209570366329744, "learning_rate": 0.00031165864347781983, "loss": 4.3946, "step": 2201 }, { "epoch": 2.52, "grad_norm": 1.3459952245884348, "learning_rate": 0.000311436682765019, "loss": 4.3824, "step": 2202 }, { "epoch": 2.52, "grad_norm": 0.9727966252921615, "learning_rate": 0.0003112147157824174, "loss": 4.3544, "step": 2203 }, { "epoch": 2.52, "grad_norm": 2.4600080752103253, "learning_rate": 0.0003109927426517016, "loss": 4.393, "step": 2204 }, { "epoch": 2.52, "grad_norm": 1.7644956125317075, "learning_rate": 0.00031077076349456107, "loss": 4.327, "step": 2205 }, { "epoch": 2.52, "grad_norm": 1.4802207567157337, "learning_rate": 0.0003105487784326893, "loss": 4.3617, "step": 2206 }, { "epoch": 2.52, "grad_norm": 3.2045241291362014, "learning_rate": 0.00031032678758778225, "loss": 4.5737, "step": 2207 }, { "epoch": 2.52, "grad_norm": 4.085373145762028, "learning_rate": 0.00031010479108153964, "loss": 4.2875, "step": 2208 }, { "epoch": 2.52, "grad_norm": 0.7149033060207812, "learning_rate": 0.0003098827890356637, "loss": 4.3896, "step": 2209 }, { "epoch": 2.53, "grad_norm": 3.447788438106106, "learning_rate": 0.00030966078157186047, "loss": 4.3652, "step": 2210 }, { "epoch": 2.53, "grad_norm": 1.2359919873591187, "learning_rate": 0.0003094387688118384, "loss": 4.3119, "step": 2211 }, { "epoch": 2.53, "grad_norm": 0.8927472061604494, "learning_rate": 0.00030921675087730886, "loss": 4.5134, "step": 2212 }, { "epoch": 2.53, "grad_norm": 2.4089725296561126, "learning_rate": 0.00030899472788998623, "loss": 4.2828, "step": 2213 }, { "epoch": 2.53, "grad_norm": 1.7358133305135421, "learning_rate": 0.0003087726999715877, "loss": 4.3742, "step": 2214 }, { "epoch": 2.53, "grad_norm": 0.8483383066374621, "learning_rate": 0.0003085506672438331, "loss": 4.3095, "step": 2215 }, { "epoch": 2.53, "grad_norm": 4.888578228305651, "learning_rate": 0.0003083286298284447, "loss": 4.1725, "step": 2216 }, { "epoch": 2.53, "grad_norm": 2.671181932320894, "learning_rate": 0.0003081065878471477, "loss": 4.2494, "step": 2217 }, { "epoch": 2.53, "grad_norm": 1.3288839725642714, "learning_rate": 0.0003078845414216694, "loss": 4.415, "step": 2218 }, { "epoch": 2.54, "grad_norm": 1.139424986936041, "learning_rate": 0.00030766249067374, "loss": 4.2287, "step": 2219 }, { "epoch": 2.54, "grad_norm": 2.4580496699950736, "learning_rate": 0.0003074404357250916, "loss": 4.3068, "step": 2220 }, { "epoch": 2.54, "grad_norm": 1.9850679353102665, "learning_rate": 0.000307218376697459, "loss": 4.1687, "step": 2221 }, { "epoch": 2.54, "grad_norm": 1.9362252973349048, "learning_rate": 0.00030699631371257894, "loss": 4.4689, "step": 2222 }, { 
"epoch": 2.54, "grad_norm": 2.250279081020961, "learning_rate": 0.00030677424689219056, "loss": 4.4239, "step": 2223 }, { "epoch": 2.54, "grad_norm": 3.540751604055826, "learning_rate": 0.00030655217635803487, "loss": 4.4128, "step": 2224 }, { "epoch": 2.54, "grad_norm": 5.158894822222172, "learning_rate": 0.00030633010223185504, "loss": 4.504, "step": 2225 }, { "epoch": 2.54, "grad_norm": 4.624796486895582, "learning_rate": 0.00030610802463539635, "loss": 4.4197, "step": 2226 }, { "epoch": 2.55, "grad_norm": 5.438886645806711, "learning_rate": 0.00030588594369040565, "loss": 4.5508, "step": 2227 }, { "epoch": 2.55, "grad_norm": 4.93942618697539, "learning_rate": 0.000305663859518632, "loss": 4.7087, "step": 2228 }, { "epoch": 2.55, "grad_norm": 2.2739636397143284, "learning_rate": 0.0003054417722418259, "loss": 4.512, "step": 2229 }, { "epoch": 2.55, "grad_norm": 3.6609241250958147, "learning_rate": 0.00030521968198173987, "loss": 4.6358, "step": 2230 }, { "epoch": 2.55, "grad_norm": 2.582776370814491, "learning_rate": 0.00030499758886012765, "loss": 4.7484, "step": 2231 }, { "epoch": 2.55, "grad_norm": 2.4414297139026595, "learning_rate": 0.000304775492998745, "loss": 4.5734, "step": 2232 }, { "epoch": 2.55, "grad_norm": 2.173766589208741, "learning_rate": 0.0003045533945193488, "loss": 4.3983, "step": 2233 }, { "epoch": 2.55, "grad_norm": 1.9696876191556565, "learning_rate": 0.00030433129354369773, "loss": 4.4891, "step": 2234 }, { "epoch": 2.55, "grad_norm": 3.338010937453103, "learning_rate": 0.0003041091901935515, "loss": 4.5843, "step": 2235 }, { "epoch": 2.56, "grad_norm": 2.997217390466014, "learning_rate": 0.0003038870845906713, "loss": 4.5074, "step": 2236 }, { "epoch": 2.56, "grad_norm": 5.188210538499853, "learning_rate": 0.00030366497685681954, "loss": 4.5048, "step": 2237 }, { "epoch": 2.56, "grad_norm": 1.6740658443714023, "learning_rate": 0.0003034428671137599, "loss": 4.4301, "step": 2238 }, { "epoch": 2.56, "grad_norm": 1.329003769751257, "learning_rate": 0.0003032207554832568, "loss": 4.3007, "step": 2239 }, { "epoch": 2.56, "grad_norm": 1.7048098814772614, "learning_rate": 0.00030299864208707607, "loss": 4.1912, "step": 2240 }, { "epoch": 2.56, "grad_norm": 1.4070091656123644, "learning_rate": 0.0003027765270469844, "loss": 4.2864, "step": 2241 }, { "epoch": 2.56, "grad_norm": 0.9162101303481792, "learning_rate": 0.0003025544104847493, "loss": 4.3008, "step": 2242 }, { "epoch": 2.56, "grad_norm": 2.046456942046342, "learning_rate": 0.00030233229252213926, "loss": 4.4101, "step": 2243 }, { "epoch": 2.56, "grad_norm": 0.7535899146283395, "learning_rate": 0.00030211017328092327, "loss": 4.4258, "step": 2244 }, { "epoch": 2.57, "grad_norm": 0.7116533731339875, "learning_rate": 0.00030188805288287136, "loss": 4.1992, "step": 2245 }, { "epoch": 2.57, "grad_norm": 1.5018265346131665, "learning_rate": 0.00030166593144975403, "loss": 4.3136, "step": 2246 }, { "epoch": 2.57, "grad_norm": 0.9684224100150691, "learning_rate": 0.0003014438091033423, "loss": 4.491, "step": 2247 }, { "epoch": 2.57, "grad_norm": 1.080287473275391, "learning_rate": 0.00030122168596540764, "loss": 4.2714, "step": 2248 }, { "epoch": 2.57, "grad_norm": 1.5536016897440224, "learning_rate": 0.0003009995621577224, "loss": 4.471, "step": 2249 }, { "epoch": 2.57, "grad_norm": 1.1398120180410132, "learning_rate": 0.00030077743780205856, "loss": 4.1575, "step": 2250 }, { "epoch": 2.57, "grad_norm": 1.0505921432368766, "learning_rate": 0.00030055531302018907, "loss": 4.505, "step": 2251 }, { "epoch": 2.57, 
"grad_norm": 2.6875402043445167, "learning_rate": 0.00030033318793388656, "loss": 4.3958, "step": 2252 }, { "epoch": 2.57, "grad_norm": 0.7150643426274241, "learning_rate": 0.0003001110626649244, "loss": 4.4882, "step": 2253 }, { "epoch": 2.58, "grad_norm": 1.380782414167924, "learning_rate": 0.00029988893733507566, "loss": 4.3245, "step": 2254 }, { "epoch": 2.58, "grad_norm": 1.7020007314942747, "learning_rate": 0.00029966681206611344, "loss": 4.312, "step": 2255 }, { "epoch": 2.58, "grad_norm": 0.5236301090997395, "learning_rate": 0.00029944468697981093, "loss": 4.2915, "step": 2256 }, { "epoch": 2.58, "grad_norm": 1.4904266813120155, "learning_rate": 0.0002992225621979414, "loss": 4.3707, "step": 2257 }, { "epoch": 2.58, "grad_norm": 0.9724120414575723, "learning_rate": 0.0002990004378422777, "loss": 4.3381, "step": 2258 }, { "epoch": 2.58, "grad_norm": 1.0816068780481227, "learning_rate": 0.0002987783140345923, "loss": 4.3347, "step": 2259 }, { "epoch": 2.58, "grad_norm": 0.4504179408684841, "learning_rate": 0.0002985561908966577, "loss": 4.4011, "step": 2260 }, { "epoch": 2.58, "grad_norm": 3.7973584630116908, "learning_rate": 0.0002983340685502459, "loss": 4.2082, "step": 2261 }, { "epoch": 2.59, "grad_norm": 4.309066969335737, "learning_rate": 0.0002981119471171287, "loss": 4.4639, "step": 2262 }, { "epoch": 2.59, "grad_norm": 2.251558124574615, "learning_rate": 0.00029788982671907673, "loss": 4.3506, "step": 2263 }, { "epoch": 2.59, "grad_norm": 0.9136989308039597, "learning_rate": 0.0002976677074778608, "loss": 4.266, "step": 2264 }, { "epoch": 2.59, "grad_norm": 2.9386342621317563, "learning_rate": 0.00029744558951525066, "loss": 4.3484, "step": 2265 }, { "epoch": 2.59, "grad_norm": 1.1251874111219151, "learning_rate": 0.00029722347295301556, "loss": 4.2438, "step": 2266 }, { "epoch": 2.59, "grad_norm": 1.2944831528535317, "learning_rate": 0.00029700135791292393, "loss": 4.3637, "step": 2267 }, { "epoch": 2.59, "grad_norm": 2.355721183286784, "learning_rate": 0.0002967792445167432, "loss": 4.3078, "step": 2268 }, { "epoch": 2.59, "grad_norm": 0.6449674217044504, "learning_rate": 0.0002965571328862401, "loss": 4.4297, "step": 2269 }, { "epoch": 2.59, "grad_norm": 0.6430677757973748, "learning_rate": 0.00029633502314318035, "loss": 4.2022, "step": 2270 }, { "epoch": 2.6, "grad_norm": 2.0557078403562836, "learning_rate": 0.00029611291540932865, "loss": 4.4227, "step": 2271 }, { "epoch": 2.6, "grad_norm": 0.6065317747631467, "learning_rate": 0.0002958908098064485, "loss": 4.212, "step": 2272 }, { "epoch": 2.6, "grad_norm": 5.447514734150979, "learning_rate": 0.0002956687064563022, "loss": 4.1951, "step": 2273 }, { "epoch": 2.6, "grad_norm": 3.7166608530222605, "learning_rate": 0.0002954466054806511, "loss": 4.2941, "step": 2274 }, { "epoch": 2.6, "grad_norm": 1.859944420184991, "learning_rate": 0.000295224507001255, "loss": 4.2357, "step": 2275 }, { "epoch": 2.6, "grad_norm": 3.442382217026208, "learning_rate": 0.0002950024111398723, "loss": 4.214, "step": 2276 }, { "epoch": 2.6, "grad_norm": 3.6262753871302342, "learning_rate": 0.00029478031801826013, "loss": 4.3396, "step": 2277 }, { "epoch": 2.6, "grad_norm": 5.409394165865939, "learning_rate": 0.000294558227758174, "loss": 4.5918, "step": 2278 }, { "epoch": 2.6, "grad_norm": 1.2352726590298537, "learning_rate": 0.000294336140481368, "loss": 4.2783, "step": 2279 }, { "epoch": 2.61, "grad_norm": 2.3111333987220926, "learning_rate": 0.0002941140563095943, "loss": 4.4927, "step": 2280 }, { "epoch": 2.61, "grad_norm": 
3.09885958819297, "learning_rate": 0.00029389197536460365, "loss": 4.4625, "step": 2281 }, { "epoch": 2.61, "grad_norm": 1.975683469560857, "learning_rate": 0.0002936698977681449, "loss": 4.1609, "step": 2282 }, { "epoch": 2.61, "grad_norm": 0.7578148621399394, "learning_rate": 0.0002934478236419651, "loss": 4.2658, "step": 2283 }, { "epoch": 2.61, "grad_norm": 0.9413044418629747, "learning_rate": 0.00029322575310780944, "loss": 4.3923, "step": 2284 }, { "epoch": 2.61, "grad_norm": 3.108926660752368, "learning_rate": 0.000293003686287421, "loss": 4.4338, "step": 2285 }, { "epoch": 2.61, "grad_norm": 1.3066627303412668, "learning_rate": 0.00029278162330254097, "loss": 4.3493, "step": 2286 }, { "epoch": 2.61, "grad_norm": 1.3567465151123534, "learning_rate": 0.00029255956427490834, "loss": 4.4, "step": 2287 }, { "epoch": 2.61, "grad_norm": 3.7553266799879896, "learning_rate": 0.00029233750932626003, "loss": 4.4734, "step": 2288 }, { "epoch": 2.62, "grad_norm": 1.1273898421543045, "learning_rate": 0.0002921154585783306, "loss": 4.3404, "step": 2289 }, { "epoch": 2.62, "grad_norm": 2.0927583620782526, "learning_rate": 0.0002918934121528523, "loss": 4.2912, "step": 2290 }, { "epoch": 2.62, "grad_norm": 1.6898422140242562, "learning_rate": 0.00029167137017155527, "loss": 4.4021, "step": 2291 }, { "epoch": 2.62, "grad_norm": 1.0696671970440566, "learning_rate": 0.00029144933275616696, "loss": 4.4117, "step": 2292 }, { "epoch": 2.62, "grad_norm": 0.9557313765926126, "learning_rate": 0.0002912273000284123, "loss": 4.4116, "step": 2293 }, { "epoch": 2.62, "grad_norm": 1.5563010420761698, "learning_rate": 0.00029100527211001377, "loss": 4.3476, "step": 2294 }, { "epoch": 2.62, "grad_norm": 0.9426540689483062, "learning_rate": 0.0002907832491226911, "loss": 4.4253, "step": 2295 }, { "epoch": 2.62, "grad_norm": 0.5994127975902223, "learning_rate": 0.00029056123118816166, "loss": 4.2409, "step": 2296 }, { "epoch": 2.63, "grad_norm": 1.5083264269189016, "learning_rate": 0.00029033921842813953, "loss": 4.2943, "step": 2297 }, { "epoch": 2.63, "grad_norm": 1.3448793667983174, "learning_rate": 0.0002901172109643362, "loss": 3.9476, "step": 2298 }, { "epoch": 2.63, "grad_norm": 0.5688283858563615, "learning_rate": 0.00028989520891846036, "loss": 4.2914, "step": 2299 }, { "epoch": 2.63, "grad_norm": 1.3115752912377703, "learning_rate": 0.00028967321241221765, "loss": 4.4191, "step": 2300 }, { "epoch": 2.63, "eval_blimp_filtered_avg": 0.5271641791044777, "eval_blimp_filtered_std": 0.0055033061926466975, "step": 2300 }, { "epoch": 2.63, "eval_blimp_supplement_avg": 0.5043103448275862, "eval_blimp_supplement_std": 0.022841245982437135, "step": 2300 }, { "epoch": 2.63, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 2300 }, { "epoch": 2.63, "eval_winoground_filtered_avg": 0.49, "eval_winoground_filtered_std": 0.05024183937956912, "step": 2300 }, { "epoch": 2.63, "grad_norm": 1.4079782126170108, "learning_rate": 0.00028945122156731075, "loss": 4.1759, "step": 2301 }, { "epoch": 2.63, "grad_norm": 1.0672890614094868, "learning_rate": 0.0002892292365054389, "loss": 4.2851, "step": 2302 }, { "epoch": 2.63, "grad_norm": 0.8273065827177049, "learning_rate": 0.00028900725734829836, "loss": 4.3457, "step": 2303 }, { "epoch": 2.63, "grad_norm": 4.27879900863289, "learning_rate": 0.00028878528421758253, "loss": 4.3369, "step": 2304 }, { "epoch": 2.63, "grad_norm": 2.9729041182523845, "learning_rate": 0.00028856331723498105, "loss": 4.4518, "step": 2305 }, { "epoch": 2.64, "grad_norm": 
5.8485080558022755, "learning_rate": 0.00028834135652218017, "loss": 4.4518, "step": 2306 }, { "epoch": 2.64, "grad_norm": 2.696541393972968, "learning_rate": 0.000288119402200863, "loss": 4.09, "step": 2307 }, { "epoch": 2.64, "grad_norm": 2.7052006660951307, "learning_rate": 0.00028789745439270886, "loss": 4.3882, "step": 2308 }, { "epoch": 2.64, "grad_norm": 5.640204300057213, "learning_rate": 0.000287675513219394, "loss": 4.5902, "step": 2309 }, { "epoch": 2.64, "grad_norm": 1.4050318150262908, "learning_rate": 0.0002874535788025902, "loss": 4.4239, "step": 2310 }, { "epoch": 2.64, "grad_norm": 1.4049010323756423, "learning_rate": 0.0002872316512639662, "loss": 4.4407, "step": 2311 }, { "epoch": 2.64, "grad_norm": 2.71760796655439, "learning_rate": 0.0002870097307251868, "loss": 4.3448, "step": 2312 }, { "epoch": 2.64, "grad_norm": 2.357070726389828, "learning_rate": 0.0002867878173079131, "loss": 4.4178, "step": 2313 }, { "epoch": 2.64, "grad_norm": 0.9962240860109267, "learning_rate": 0.0002865659111338018, "loss": 4.0915, "step": 2314 }, { "epoch": 2.65, "grad_norm": 1.2333832928155484, "learning_rate": 0.0002863440123245061, "loss": 4.3536, "step": 2315 }, { "epoch": 2.65, "grad_norm": 2.612066502389392, "learning_rate": 0.0002861221210016751, "loss": 4.4801, "step": 2316 }, { "epoch": 2.65, "grad_norm": 1.4404549516657346, "learning_rate": 0.00028590023728695366, "loss": 4.6146, "step": 2317 }, { "epoch": 2.65, "grad_norm": 0.974081918749349, "learning_rate": 0.0002856783613019826, "loss": 4.291, "step": 2318 }, { "epoch": 2.65, "grad_norm": 0.7135896416775674, "learning_rate": 0.00028545649316839833, "loss": 4.2509, "step": 2319 }, { "epoch": 2.65, "grad_norm": 1.4725686269649927, "learning_rate": 0.00028523463300783304, "loss": 4.3603, "step": 2320 }, { "epoch": 2.65, "grad_norm": 1.1269754694963419, "learning_rate": 0.00028501278094191467, "loss": 4.3943, "step": 2321 }, { "epoch": 2.65, "grad_norm": 0.9147364453050987, "learning_rate": 0.0002847909370922667, "loss": 4.3609, "step": 2322 }, { "epoch": 2.65, "grad_norm": 0.8150969047583511, "learning_rate": 0.00028456910158050776, "loss": 4.3304, "step": 2323 }, { "epoch": 2.66, "grad_norm": 0.7857459651465912, "learning_rate": 0.00028434727452825235, "loss": 4.3625, "step": 2324 }, { "epoch": 2.66, "grad_norm": 1.9030219005323503, "learning_rate": 0.0002841254560571101, "loss": 4.2372, "step": 2325 }, { "epoch": 2.66, "grad_norm": 1.1078058058514557, "learning_rate": 0.0002839036462886861, "loss": 4.0541, "step": 2326 }, { "epoch": 2.66, "grad_norm": 0.6707546803745114, "learning_rate": 0.0002836818453445804, "loss": 4.3139, "step": 2327 }, { "epoch": 2.66, "grad_norm": 1.9819440745730001, "learning_rate": 0.0002834600533463885, "loss": 4.2085, "step": 2328 }, { "epoch": 2.66, "grad_norm": 1.7426465597479164, "learning_rate": 0.0002832382704157008, "loss": 4.3612, "step": 2329 }, { "epoch": 2.66, "grad_norm": 1.255295869223125, "learning_rate": 0.0002830164966741028, "loss": 4.1483, "step": 2330 }, { "epoch": 2.66, "grad_norm": 2.5220371518609976, "learning_rate": 0.000282794732243175, "loss": 4.1273, "step": 2331 }, { "epoch": 2.67, "grad_norm": 2.7753217141279873, "learning_rate": 0.00028257297724449277, "loss": 4.1221, "step": 2332 }, { "epoch": 2.67, "grad_norm": 1.6283196173403756, "learning_rate": 0.0002823512317996262, "loss": 4.3878, "step": 2333 }, { "epoch": 2.67, "grad_norm": 3.112398148856668, "learning_rate": 0.00028212949603014035, "loss": 4.3286, "step": 2334 }, { "epoch": 2.67, "grad_norm": 2.0207375487858874, 
"learning_rate": 0.00028190777005759486, "loss": 4.2133, "step": 2335 }, { "epoch": 2.67, "grad_norm": 1.1668499718240484, "learning_rate": 0.0002816860540035439, "loss": 4.3273, "step": 2336 }, { "epoch": 2.67, "grad_norm": 2.328319795023674, "learning_rate": 0.00028146434798953637, "loss": 4.3533, "step": 2337 }, { "epoch": 2.67, "grad_norm": 2.1403670790659244, "learning_rate": 0.0002812426521371156, "loss": 4.3453, "step": 2338 }, { "epoch": 2.67, "grad_norm": 0.6182615651649437, "learning_rate": 0.00028102096656781937, "loss": 4.2018, "step": 2339 }, { "epoch": 2.67, "grad_norm": 1.2379013745070362, "learning_rate": 0.0002807992914031797, "loss": 4.2253, "step": 2340 }, { "epoch": 2.68, "grad_norm": 1.32228929142265, "learning_rate": 0.0002805776267647232, "loss": 4.2342, "step": 2341 }, { "epoch": 2.68, "grad_norm": 0.8389277281365515, "learning_rate": 0.0002803559727739703, "loss": 4.1933, "step": 2342 }, { "epoch": 2.68, "grad_norm": 1.063081859360418, "learning_rate": 0.00028013432955243604, "loss": 4.5445, "step": 2343 }, { "epoch": 2.68, "grad_norm": 1.2760985773935203, "learning_rate": 0.00027991269722162917, "loss": 4.2203, "step": 2344 }, { "epoch": 2.68, "grad_norm": 1.4864970732377776, "learning_rate": 0.0002796910759030526, "loss": 4.3592, "step": 2345 }, { "epoch": 2.68, "grad_norm": 0.4606536184524214, "learning_rate": 0.0002794694657182033, "loss": 4.2188, "step": 2346 }, { "epoch": 2.68, "grad_norm": 1.262518679012878, "learning_rate": 0.00027924786678857217, "loss": 4.2866, "step": 2347 }, { "epoch": 2.68, "grad_norm": 2.423756465643589, "learning_rate": 0.00027902627923564365, "loss": 4.2946, "step": 2348 }, { "epoch": 2.68, "grad_norm": 1.5763320717818459, "learning_rate": 0.00027880470318089617, "loss": 4.1811, "step": 2349 }, { "epoch": 2.69, "grad_norm": 1.4726902693921242, "learning_rate": 0.00027858313874580185, "loss": 4.2424, "step": 2350 }, { "epoch": 2.69, "grad_norm": 0.5870611920388359, "learning_rate": 0.00027836158605182656, "loss": 4.2, "step": 2351 }, { "epoch": 2.69, "grad_norm": 0.7465765263609636, "learning_rate": 0.0002781400452204293, "loss": 4.35, "step": 2352 }, { "epoch": 2.69, "grad_norm": 0.9933353619229455, "learning_rate": 0.0002779185163730629, "loss": 4.4204, "step": 2353 }, { "epoch": 2.69, "grad_norm": 0.6985505623737489, "learning_rate": 0.0002776969996311737, "loss": 4.2703, "step": 2354 }, { "epoch": 2.69, "grad_norm": 1.0380363971616795, "learning_rate": 0.0002774754951162012, "loss": 4.3091, "step": 2355 }, { "epoch": 2.69, "grad_norm": 3.1629785769881646, "learning_rate": 0.0002772540029495784, "loss": 4.5021, "step": 2356 }, { "epoch": 2.69, "grad_norm": 2.5476361888916004, "learning_rate": 0.00027703252325273105, "loss": 4.3604, "step": 2357 }, { "epoch": 2.69, "grad_norm": 0.8130295054536668, "learning_rate": 0.0002768110561470787, "loss": 4.2637, "step": 2358 }, { "epoch": 2.7, "grad_norm": 0.825002961380643, "learning_rate": 0.0002765896017540337, "loss": 4.4267, "step": 2359 }, { "epoch": 2.7, "grad_norm": 0.673164769585789, "learning_rate": 0.00027636816019500145, "loss": 4.4508, "step": 2360 }, { "epoch": 2.7, "grad_norm": 0.5538523670227395, "learning_rate": 0.00027614673159138, "loss": 4.4611, "step": 2361 }, { "epoch": 2.7, "grad_norm": 1.3400379169374679, "learning_rate": 0.0002759253160645609, "loss": 4.3602, "step": 2362 }, { "epoch": 2.7, "grad_norm": 2.0523736306199836, "learning_rate": 0.0002757039137359282, "loss": 4.2375, "step": 2363 }, { "epoch": 2.7, "grad_norm": 0.8380415195633117, "learning_rate": 
0.00027548252472685877, "loss": 4.3231, "step": 2364 }, { "epoch": 2.7, "grad_norm": 1.4902812522394382, "learning_rate": 0.00027526114915872195, "loss": 4.4165, "step": 2365 }, { "epoch": 2.7, "grad_norm": 0.8208638457827007, "learning_rate": 0.0002750397871528801, "loss": 4.3429, "step": 2366 }, { "epoch": 2.71, "grad_norm": 1.3130379975055768, "learning_rate": 0.0002748184388306879, "loss": 4.1567, "step": 2367 }, { "epoch": 2.71, "grad_norm": 0.9066970511291104, "learning_rate": 0.00027459710431349264, "loss": 4.2985, "step": 2368 }, { "epoch": 2.71, "grad_norm": 1.7184326269758523, "learning_rate": 0.0002743757837226339, "loss": 4.2431, "step": 2369 }, { "epoch": 2.71, "grad_norm": 1.3206239079736553, "learning_rate": 0.00027415447717944383, "loss": 4.2916, "step": 2370 }, { "epoch": 2.71, "grad_norm": 4.3247156205440564, "learning_rate": 0.0002739331848052466, "loss": 4.3808, "step": 2371 }, { "epoch": 2.71, "grad_norm": 6.425953120992067, "learning_rate": 0.0002737119067213589, "loss": 4.5812, "step": 2372 }, { "epoch": 2.71, "grad_norm": 3.824791270227781, "learning_rate": 0.00027349064304908955, "loss": 4.4341, "step": 2373 }, { "epoch": 2.71, "grad_norm": 3.2426533258009074, "learning_rate": 0.00027326939390973917, "loss": 4.2938, "step": 2374 }, { "epoch": 2.71, "grad_norm": 5.425335011885779, "learning_rate": 0.0002730481594246007, "loss": 4.7417, "step": 2375 }, { "epoch": 2.72, "grad_norm": 4.8348985576551495, "learning_rate": 0.000272826939714959, "loss": 4.5723, "step": 2376 }, { "epoch": 2.72, "grad_norm": 1.393893246030523, "learning_rate": 0.0002726057349020908, "loss": 4.3729, "step": 2377 }, { "epoch": 2.72, "grad_norm": 2.1301721066781862, "learning_rate": 0.0002723845451072647, "loss": 4.55, "step": 2378 }, { "epoch": 2.72, "grad_norm": 2.0540487935948053, "learning_rate": 0.00027216337045174097, "loss": 4.6937, "step": 2379 }, { "epoch": 2.72, "grad_norm": 2.778671978797922, "learning_rate": 0.0002719422110567716, "loss": 4.6864, "step": 2380 }, { "epoch": 2.72, "grad_norm": 2.3553341591383727, "learning_rate": 0.00027172106704360043, "loss": 4.5567, "step": 2381 }, { "epoch": 2.72, "grad_norm": 0.7544938243284398, "learning_rate": 0.00027149993853346245, "loss": 4.1503, "step": 2382 }, { "epoch": 2.72, "grad_norm": 2.131178932833919, "learning_rate": 0.00027127882564758455, "loss": 4.4901, "step": 2383 }, { "epoch": 2.72, "grad_norm": 4.351914126894973, "learning_rate": 0.00027105772850718487, "loss": 4.4113, "step": 2384 }, { "epoch": 2.73, "grad_norm": 3.931524743641572, "learning_rate": 0.00027083664723347303, "loss": 4.5625, "step": 2385 }, { "epoch": 2.73, "grad_norm": 1.6062676130825801, "learning_rate": 0.00027061558194764967, "loss": 4.2058, "step": 2386 }, { "epoch": 2.73, "grad_norm": 1.4106702526256814, "learning_rate": 0.000270394532770907, "loss": 4.3231, "step": 2387 }, { "epoch": 2.73, "grad_norm": 2.672709758489334, "learning_rate": 0.00027017349982442823, "loss": 4.4051, "step": 2388 }, { "epoch": 2.73, "grad_norm": 2.5507906352031786, "learning_rate": 0.0002699524832293876, "loss": 4.3307, "step": 2389 }, { "epoch": 2.73, "grad_norm": 2.1536149680023255, "learning_rate": 0.0002697314831069508, "loss": 4.2658, "step": 2390 }, { "epoch": 2.73, "grad_norm": 1.3790333653791862, "learning_rate": 0.0002695104995782739, "loss": 4.2069, "step": 2391 }, { "epoch": 2.73, "grad_norm": 2.0087300257882044, "learning_rate": 0.0002692895327645042, "loss": 4.297, "step": 2392 }, { "epoch": 2.73, "grad_norm": 7.632478819434307, "learning_rate": 
0.00026906858278677974, "loss": 4.168, "step": 2393 }, { "epoch": 2.74, "grad_norm": 4.626695969170174, "learning_rate": 0.00026884764976622965, "loss": 4.6988, "step": 2394 }, { "epoch": 2.74, "grad_norm": 8.288464344088812, "learning_rate": 0.00026862673382397316, "loss": 4.7142, "step": 2395 }, { "epoch": 2.74, "grad_norm": 3.622592165531978, "learning_rate": 0.0002684058350811206, "loss": 4.4142, "step": 2396 }, { "epoch": 2.74, "grad_norm": 1.4028413928306576, "learning_rate": 0.00026818495365877264, "loss": 4.3487, "step": 2397 }, { "epoch": 2.74, "grad_norm": 5.664336156744327, "learning_rate": 0.00026796408967802096, "loss": 4.5118, "step": 2398 }, { "epoch": 2.74, "grad_norm": 3.850895326149334, "learning_rate": 0.0002677432432599467, "loss": 4.6287, "step": 2399 }, { "epoch": 2.74, "grad_norm": 2.5905521515725654, "learning_rate": 0.0002675224145256223, "loss": 4.4235, "step": 2400 }, { "epoch": 2.74, "eval_blimp_filtered_avg": 0.5262686567164179, "eval_blimp_filtered_std": 0.005558351812312166, "step": 2400 }, { "epoch": 2.74, "eval_blimp_supplement_avg": 0.5280172413793104, "eval_blimp_supplement_std": 0.022810969291901215, "step": 2400 }, { "epoch": 2.74, "eval_vqa_filtered_avg": 0.36, "eval_vqa_filtered_std": 0.04824181513244218, "step": 2400 }, { "epoch": 2.74, "eval_winoground_filtered_avg": 0.55, "eval_winoground_filtered_std": 0.05, "step": 2400 }, { "epoch": 2.74, "grad_norm": 0.99824979014869, "learning_rate": 0.00026730160359611006, "loss": 4.1203, "step": 2401 }, { "epoch": 2.75, "grad_norm": 2.1707531964064097, "learning_rate": 0.0002670808105924627, "loss": 4.347, "step": 2402 }, { "epoch": 2.75, "grad_norm": 3.2177646473190618, "learning_rate": 0.0002668600356357229, "loss": 4.6087, "step": 2403 }, { "epoch": 2.75, "grad_norm": 3.3545682828580268, "learning_rate": 0.0002666392788469235, "loss": 4.5883, "step": 2404 }, { "epoch": 2.75, "grad_norm": 2.334560206009573, "learning_rate": 0.0002664185403470876, "loss": 4.4897, "step": 2405 }, { "epoch": 2.75, "grad_norm": 0.770936159004711, "learning_rate": 0.0002661978202572281, "loss": 4.1992, "step": 2406 }, { "epoch": 2.75, "grad_norm": 2.7368464459301234, "learning_rate": 0.00026597711869834795, "loss": 4.1563, "step": 2407 }, { "epoch": 2.75, "grad_norm": 2.8833880067523006, "learning_rate": 0.00026575643579143947, "loss": 4.3527, "step": 2408 }, { "epoch": 2.75, "grad_norm": 1.8571622653674449, "learning_rate": 0.00026553577165748537, "loss": 4.3097, "step": 2409 }, { "epoch": 2.75, "grad_norm": 1.1727395919291579, "learning_rate": 0.0002653151264174578, "loss": 4.4953, "step": 2410 }, { "epoch": 2.76, "grad_norm": 0.6886746921895335, "learning_rate": 0.0002650945001923185, "loss": 4.2927, "step": 2411 }, { "epoch": 2.76, "grad_norm": 1.373311796584587, "learning_rate": 0.00026487389310301884, "loss": 4.1877, "step": 2412 }, { "epoch": 2.76, "grad_norm": 1.4562892930366158, "learning_rate": 0.0002646533052704997, "loss": 4.6184, "step": 2413 }, { "epoch": 2.76, "grad_norm": 0.8639457733764047, "learning_rate": 0.00026443273681569135, "loss": 4.2612, "step": 2414 }, { "epoch": 2.76, "grad_norm": 0.7818623149849582, "learning_rate": 0.0002642121878595137, "loss": 4.6863, "step": 2415 }, { "epoch": 2.76, "grad_norm": 0.8231654771778978, "learning_rate": 0.00026399165852287543, "loss": 4.3108, "step": 2416 }, { "epoch": 2.76, "grad_norm": 0.716871118734228, "learning_rate": 0.0002637711489266751, "loss": 4.1962, "step": 2417 }, { "epoch": 2.76, "grad_norm": 1.9088942810638283, "learning_rate": 0.0002635506591917999, 
"loss": 4.4559, "step": 2418 }, { "epoch": 2.76, "grad_norm": 1.4820473127158955, "learning_rate": 0.00026333018943912665, "loss": 4.2945, "step": 2419 }, { "epoch": 2.77, "grad_norm": 0.9019388966757095, "learning_rate": 0.0002631097397895207, "loss": 4.2484, "step": 2420 }, { "epoch": 2.77, "grad_norm": 1.7424138103174842, "learning_rate": 0.00026288931036383666, "loss": 4.1204, "step": 2421 }, { "epoch": 2.77, "grad_norm": 1.331060428321565, "learning_rate": 0.0002626689012829181, "loss": 4.2375, "step": 2422 }, { "epoch": 2.77, "grad_norm": 0.9913597042339909, "learning_rate": 0.0002624485126675973, "loss": 4.4649, "step": 2423 }, { "epoch": 2.77, "grad_norm": 0.5529213745470625, "learning_rate": 0.0002622281446386954, "loss": 4.3446, "step": 2424 }, { "epoch": 2.77, "grad_norm": 0.8480828302347831, "learning_rate": 0.0002620077973170222, "loss": 4.3045, "step": 2425 }, { "epoch": 2.77, "grad_norm": 1.0283122369122324, "learning_rate": 0.00026178747082337614, "loss": 4.0179, "step": 2426 }, { "epoch": 2.77, "grad_norm": 1.049479911725009, "learning_rate": 0.0002615671652785443, "loss": 4.4121, "step": 2427 }, { "epoch": 2.77, "grad_norm": 0.603630834514023, "learning_rate": 0.0002613468808033023, "loss": 4.2332, "step": 2428 }, { "epoch": 2.78, "grad_norm": 0.9977057004564149, "learning_rate": 0.0002611266175184141, "loss": 4.2668, "step": 2429 }, { "epoch": 2.78, "grad_norm": 1.0664215053262873, "learning_rate": 0.000260906375544632, "loss": 4.3656, "step": 2430 }, { "epoch": 2.78, "grad_norm": 0.6881169721941325, "learning_rate": 0.0002606861550026969, "loss": 4.2123, "step": 2431 }, { "epoch": 2.78, "grad_norm": 1.3204197837360618, "learning_rate": 0.0002604659560133377, "loss": 4.1416, "step": 2432 }, { "epoch": 2.78, "grad_norm": 1.2574998432535367, "learning_rate": 0.0002602457786972716, "loss": 4.2291, "step": 2433 }, { "epoch": 2.78, "grad_norm": 0.5554443425465179, "learning_rate": 0.0002600256231752037, "loss": 4.1614, "step": 2434 }, { "epoch": 2.78, "grad_norm": 0.6407737169777484, "learning_rate": 0.00025980548956782746, "loss": 4.3533, "step": 2435 }, { "epoch": 2.78, "grad_norm": 7.827846926145454, "learning_rate": 0.00025958537799582426, "loss": 4.2396, "step": 2436 }, { "epoch": 2.79, "grad_norm": 4.478601082166011, "learning_rate": 0.00025936528857986314, "loss": 4.5495, "step": 2437 }, { "epoch": 2.79, "grad_norm": 4.4535065890655, "learning_rate": 0.00025914522144060133, "loss": 4.5321, "step": 2438 }, { "epoch": 2.79, "grad_norm": 2.2453028569480487, "learning_rate": 0.00025892517669868364, "loss": 4.626, "step": 2439 }, { "epoch": 2.79, "grad_norm": 1.1789937223459397, "learning_rate": 0.0002587051544747426, "loss": 4.4243, "step": 2440 }, { "epoch": 2.79, "grad_norm": 1.7028958252683373, "learning_rate": 0.0002584851548893987, "loss": 4.4861, "step": 2441 }, { "epoch": 2.79, "grad_norm": 2.5051429208959295, "learning_rate": 0.0002582651780632595, "loss": 4.2952, "step": 2442 }, { "epoch": 2.79, "grad_norm": 3.2636528777860927, "learning_rate": 0.0002580452241169204, "loss": 4.2502, "step": 2443 }, { "epoch": 2.79, "grad_norm": 2.221441870911716, "learning_rate": 0.00025782529317096433, "loss": 4.3699, "step": 2444 }, { "epoch": 2.79, "grad_norm": 0.6412057015144468, "learning_rate": 0.00025760538534596157, "loss": 4.2175, "step": 2445 }, { "epoch": 2.8, "grad_norm": 2.2124456085358624, "learning_rate": 0.0002573855007624694, "loss": 4.368, "step": 2446 }, { "epoch": 2.8, "grad_norm": 3.3088602647083825, "learning_rate": 0.00025716563954103266, "loss": 4.4485, 
"step": 2447 }, { "epoch": 2.8, "grad_norm": 2.170421206099158, "learning_rate": 0.0002569458018021835, "loss": 4.4699, "step": 2448 }, { "epoch": 2.8, "grad_norm": 0.8369327027412495, "learning_rate": 0.00025672598766644106, "loss": 4.3763, "step": 2449 }, { "epoch": 2.8, "grad_norm": 0.938364076776348, "learning_rate": 0.0002565061972543113, "loss": 4.4062, "step": 2450 }, { "epoch": 2.8, "grad_norm": 1.8373194593740894, "learning_rate": 0.0002562864306862873, "loss": 4.1043, "step": 2451 }, { "epoch": 2.8, "grad_norm": 11.677280012197496, "learning_rate": 0.00025606668808284943, "loss": 4.3996, "step": 2452 }, { "epoch": 2.8, "grad_norm": 1.2518797258684893, "learning_rate": 0.0002558469695644647, "loss": 4.4345, "step": 2453 }, { "epoch": 2.8, "grad_norm": 2.347827295640945, "learning_rate": 0.0002556272752515865, "loss": 4.6806, "step": 2454 }, { "epoch": 2.81, "grad_norm": 2.2791294899446446, "learning_rate": 0.0002554076052646555, "loss": 4.783, "step": 2455 }, { "epoch": 2.81, "grad_norm": 2.0825124315075594, "learning_rate": 0.0002551879597240989, "loss": 4.7617, "step": 2456 }, { "epoch": 2.81, "grad_norm": 1.2801136521118432, "learning_rate": 0.00025496833875033047, "loss": 4.6163, "step": 2457 }, { "epoch": 2.81, "grad_norm": 0.8408972045096217, "learning_rate": 0.0002547487424637502, "loss": 4.8371, "step": 2458 }, { "epoch": 2.81, "grad_norm": 1.3390657051676296, "learning_rate": 0.00025452917098474516, "loss": 4.6053, "step": 2459 }, { "epoch": 2.81, "grad_norm": 1.2518553489021658, "learning_rate": 0.0002543096244336884, "loss": 4.5719, "step": 2460 }, { "epoch": 2.81, "grad_norm": 1.2547823688556723, "learning_rate": 0.0002540901029309392, "loss": 4.7789, "step": 2461 }, { "epoch": 2.81, "grad_norm": 1.4466436775581233, "learning_rate": 0.00025387060659684367, "loss": 4.6624, "step": 2462 }, { "epoch": 2.81, "grad_norm": 1.7510828498938955, "learning_rate": 0.0002536511355517334, "loss": 4.5877, "step": 2463 }, { "epoch": 2.82, "grad_norm": 1.5852186617651742, "learning_rate": 0.0002534316899159266, "loss": 4.6253, "step": 2464 }, { "epoch": 2.82, "grad_norm": 2.1523198590744084, "learning_rate": 0.0002532122698097275, "loss": 4.6131, "step": 2465 }, { "epoch": 2.82, "grad_norm": 2.068884267196014, "learning_rate": 0.00025299287535342625, "loss": 4.5747, "step": 2466 }, { "epoch": 2.82, "grad_norm": 1.265330236177896, "learning_rate": 0.00025277350666729875, "loss": 4.6157, "step": 2467 }, { "epoch": 2.82, "grad_norm": 0.8685165177456566, "learning_rate": 0.00025255416387160714, "loss": 4.5911, "step": 2468 }, { "epoch": 2.82, "grad_norm": 1.4462807825674788, "learning_rate": 0.0002523348470865991, "loss": 4.6345, "step": 2469 }, { "epoch": 2.82, "grad_norm": 1.7974338514807957, "learning_rate": 0.0002521155564325083, "loss": 4.4022, "step": 2470 }, { "epoch": 2.82, "grad_norm": 1.4327295852652384, "learning_rate": 0.00025189629202955374, "loss": 4.5031, "step": 2471 }, { "epoch": 2.83, "grad_norm": 0.874255317510061, "learning_rate": 0.00025167705399794035, "loss": 4.4118, "step": 2472 }, { "epoch": 2.83, "grad_norm": 0.9919097691232142, "learning_rate": 0.0002514578424578583, "loss": 4.5023, "step": 2473 }, { "epoch": 2.83, "grad_norm": 3.037123683953898, "learning_rate": 0.00025123865752948364, "loss": 4.4696, "step": 2474 }, { "epoch": 2.83, "grad_norm": 0.9323962611367328, "learning_rate": 0.0002510194993329774, "loss": 4.4952, "step": 2475 }, { "epoch": 2.83, "grad_norm": 0.6230990706496498, "learning_rate": 0.0002508003679884862, "loss": 4.2457, "step": 2476 }, { 
"epoch": 2.83, "grad_norm": 1.3375277448145781, "learning_rate": 0.00025058126361614193, "loss": 4.3794, "step": 2477 }, { "epoch": 2.83, "grad_norm": 1.1494561526114837, "learning_rate": 0.0002503621863360616, "loss": 4.3924, "step": 2478 }, { "epoch": 2.83, "grad_norm": 0.824463389480698, "learning_rate": 0.00025014313626834755, "loss": 4.23, "step": 2479 }, { "epoch": 2.83, "grad_norm": 1.2055468441140331, "learning_rate": 0.0002499241135330869, "loss": 4.4901, "step": 2480 }, { "epoch": 2.84, "grad_norm": 0.8412227814348342, "learning_rate": 0.000249705118250352, "loss": 4.2681, "step": 2481 }, { "epoch": 2.84, "grad_norm": 0.5708481463921478, "learning_rate": 0.00024948615054020025, "loss": 4.2648, "step": 2482 }, { "epoch": 2.84, "grad_norm": 6.758569382516103, "learning_rate": 0.00024926721052267363, "loss": 4.244, "step": 2483 }, { "epoch": 2.84, "grad_norm": 2.811911989934368, "learning_rate": 0.00024904829831779915, "loss": 4.6532, "step": 2484 }, { "epoch": 2.84, "grad_norm": 1.351155928990301, "learning_rate": 0.00024882941404558856, "loss": 4.4838, "step": 2485 }, { "epoch": 2.84, "grad_norm": 0.6520006368457554, "learning_rate": 0.00024861055782603815, "loss": 4.3348, "step": 2486 }, { "epoch": 2.84, "grad_norm": 1.4801960946219987, "learning_rate": 0.00024839172977912915, "loss": 4.6513, "step": 2487 }, { "epoch": 2.84, "grad_norm": 1.5520826948168076, "learning_rate": 0.00024817293002482696, "loss": 4.5324, "step": 2488 }, { "epoch": 2.84, "grad_norm": 1.1523930399562032, "learning_rate": 0.00024795415868308164, "loss": 4.4049, "step": 2489 }, { "epoch": 2.85, "grad_norm": 0.7278089850771241, "learning_rate": 0.00024773541587382774, "loss": 4.4099, "step": 2490 }, { "epoch": 2.85, "grad_norm": 0.8520056265238279, "learning_rate": 0.00024751670171698425, "loss": 4.4215, "step": 2491 }, { "epoch": 2.85, "grad_norm": 1.0690514262090678, "learning_rate": 0.000247298016332454, "loss": 4.4654, "step": 2492 }, { "epoch": 2.85, "grad_norm": 0.6582093377940849, "learning_rate": 0.0002470793598401246, "loss": 4.3856, "step": 2493 }, { "epoch": 2.85, "grad_norm": 0.4650402367175494, "learning_rate": 0.00024686073235986736, "loss": 4.4173, "step": 2494 }, { "epoch": 2.85, "grad_norm": 0.5550051698566519, "learning_rate": 0.00024664213401153813, "loss": 4.498, "step": 2495 }, { "epoch": 2.85, "grad_norm": 0.8802403572982475, "learning_rate": 0.0002464235649149766, "loss": 4.1077, "step": 2496 }, { "epoch": 2.85, "grad_norm": 0.6562823599233069, "learning_rate": 0.00024620502519000616, "loss": 4.3583, "step": 2497 }, { "epoch": 2.85, "grad_norm": 0.48291473954310066, "learning_rate": 0.00024598651495643433, "loss": 4.359, "step": 2498 }, { "epoch": 2.86, "grad_norm": 0.8845710443897843, "learning_rate": 0.00024576803433405263, "loss": 4.2643, "step": 2499 }, { "epoch": 2.86, "grad_norm": 0.8606982636574948, "learning_rate": 0.00024554958344263623, "loss": 4.4885, "step": 2500 }, { "epoch": 2.86, "eval_blimp_filtered_avg": 0.5313432835820896, "eval_blimp_filtered_std": 0.005502651952302882, "step": 2500 }, { "epoch": 2.86, "eval_blimp_supplement_avg": 0.540948275862069, "eval_blimp_supplement_std": 0.022776400906345202, "step": 2500 }, { "epoch": 2.86, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 2500 }, { "epoch": 2.86, "eval_winoground_filtered_avg": 0.52, "eval_winoground_filtered_std": 0.05021167315686779, "step": 2500 }, { "epoch": 2.86, "grad_norm": 0.5651469465697178, "learning_rate": 0.00024533116240194356, "loss": 4.1912, "step": 2501 }, { 
"epoch": 2.86, "grad_norm": 1.14749751983357, "learning_rate": 0.0002451127713317174, "loss": 4.229, "step": 2502 }, { "epoch": 2.86, "grad_norm": 0.8241128709779351, "learning_rate": 0.0002448944103516836, "loss": 4.3691, "step": 2503 }, { "epoch": 2.86, "grad_norm": 0.5204256856361168, "learning_rate": 0.000244676079581552, "loss": 4.1866, "step": 2504 }, { "epoch": 2.86, "grad_norm": 0.9138386358067094, "learning_rate": 0.0002444577791410151, "loss": 4.1387, "step": 2505 }, { "epoch": 2.86, "grad_norm": 0.7633904432965017, "learning_rate": 0.00024423950914974953, "loss": 4.3597, "step": 2506 }, { "epoch": 2.87, "grad_norm": 0.5535881430660151, "learning_rate": 0.00024402126972741483, "loss": 4.3361, "step": 2507 }, { "epoch": 2.87, "grad_norm": 0.9553212589826067, "learning_rate": 0.000243803060993654, "loss": 4.2876, "step": 2508 }, { "epoch": 2.87, "grad_norm": 1.001626313172623, "learning_rate": 0.00024358488306809288, "loss": 4.3321, "step": 2509 }, { "epoch": 2.87, "grad_norm": 0.8562308593409345, "learning_rate": 0.00024336673607034076, "loss": 4.2753, "step": 2510 }, { "epoch": 2.87, "grad_norm": 0.7068258144551033, "learning_rate": 0.0002431486201199899, "loss": 4.2083, "step": 2511 }, { "epoch": 2.87, "grad_norm": 0.7381055658211995, "learning_rate": 0.00024293053533661543, "loss": 4.1509, "step": 2512 }, { "epoch": 2.87, "grad_norm": 0.6004570795659198, "learning_rate": 0.00024271248183977556, "loss": 4.2049, "step": 2513 }, { "epoch": 2.87, "grad_norm": 1.0525446924899813, "learning_rate": 0.00024249445974901108, "loss": 4.3442, "step": 2514 }, { "epoch": 2.87, "grad_norm": 0.5799693014198636, "learning_rate": 0.00024227646918384576, "loss": 4.2701, "step": 2515 }, { "epoch": 2.88, "grad_norm": 0.4880977986877435, "learning_rate": 0.00024205851026378613, "loss": 4.1782, "step": 2516 }, { "epoch": 2.88, "grad_norm": 0.5859847907237523, "learning_rate": 0.00024184058310832134, "loss": 4.361, "step": 2517 }, { "epoch": 2.88, "grad_norm": 0.7466025025315636, "learning_rate": 0.00024162268783692296, "loss": 4.193, "step": 2518 }, { "epoch": 2.88, "grad_norm": 0.9549652036442368, "learning_rate": 0.0002414048245690452, "loss": 4.1619, "step": 2519 }, { "epoch": 2.88, "grad_norm": 0.4468170790229295, "learning_rate": 0.00024118699342412479, "loss": 4.281, "step": 2520 }, { "epoch": 2.88, "grad_norm": 0.8571379079232043, "learning_rate": 0.00024096919452158087, "loss": 4.339, "step": 2521 }, { "epoch": 2.88, "grad_norm": 0.6737576755736234, "learning_rate": 0.00024075142798081457, "loss": 4.3447, "step": 2522 }, { "epoch": 2.88, "grad_norm": 0.8879965708655246, "learning_rate": 0.00024053369392120974, "loss": 4.2534, "step": 2523 }, { "epoch": 2.88, "grad_norm": 0.8479273253744606, "learning_rate": 0.00024031599246213212, "loss": 4.2303, "step": 2524 }, { "epoch": 2.89, "grad_norm": 0.4698391717351646, "learning_rate": 0.00024009832372292984, "loss": 4.1858, "step": 2525 }, { "epoch": 2.89, "grad_norm": 1.2901717077375932, "learning_rate": 0.00023988068782293266, "loss": 4.1041, "step": 2526 }, { "epoch": 2.89, "grad_norm": 0.7083022794606397, "learning_rate": 0.00023966308488145274, "loss": 4.1434, "step": 2527 }, { "epoch": 2.89, "grad_norm": 0.7011069805650739, "learning_rate": 0.000239445515017784, "loss": 4.2788, "step": 2528 }, { "epoch": 2.89, "grad_norm": 1.1421536150407554, "learning_rate": 0.0002392279783512023, "loss": 4.2421, "step": 2529 }, { "epoch": 2.89, "grad_norm": 0.5875615755101561, "learning_rate": 0.0002390104750009653, "loss": 4.1006, "step": 2530 }, { "epoch": 
2.89, "grad_norm": 1.1268722681188232, "learning_rate": 0.0002387930050863122, "loss": 4.398, "step": 2531 }, { "epoch": 2.89, "grad_norm": 0.6222772437799192, "learning_rate": 0.00023857556872646413, "loss": 4.1596, "step": 2532 }, { "epoch": 2.89, "grad_norm": 0.6338559472716655, "learning_rate": 0.00023835816604062367, "loss": 4.2389, "step": 2533 }, { "epoch": 2.9, "grad_norm": 1.0666248549531778, "learning_rate": 0.00023814079714797512, "loss": 4.2561, "step": 2534 }, { "epoch": 2.9, "grad_norm": 0.5946667373598951, "learning_rate": 0.00023792346216768394, "loss": 4.266, "step": 2535 }, { "epoch": 2.9, "grad_norm": 0.5122066332603631, "learning_rate": 0.00023770616121889724, "loss": 4.2613, "step": 2536 }, { "epoch": 2.9, "grad_norm": 0.6377780125525387, "learning_rate": 0.0002374888944207434, "loss": 4.1513, "step": 2537 }, { "epoch": 2.9, "grad_norm": 0.6721856454702817, "learning_rate": 0.00023727166189233228, "loss": 4.2296, "step": 2538 }, { "epoch": 2.9, "grad_norm": 0.5696060569492103, "learning_rate": 0.00023705446375275448, "loss": 4.2158, "step": 2539 }, { "epoch": 2.9, "grad_norm": 0.773353550327232, "learning_rate": 0.00023683730012108217, "loss": 4.1747, "step": 2540 }, { "epoch": 2.9, "grad_norm": 1.5065550956301166, "learning_rate": 0.00023662017111636838, "loss": 4.245, "step": 2541 }, { "epoch": 2.91, "grad_norm": 1.0783950898470078, "learning_rate": 0.0002364030768576475, "loss": 4.2991, "step": 2542 }, { "epoch": 2.91, "grad_norm": 1.263051871743404, "learning_rate": 0.0002361860174639343, "loss": 4.1776, "step": 2543 }, { "epoch": 2.91, "grad_norm": 0.6999807348176544, "learning_rate": 0.00023596899305422487, "loss": 4.4206, "step": 2544 }, { "epoch": 2.91, "grad_norm": 1.614169177116863, "learning_rate": 0.00023575200374749593, "loss": 4.1526, "step": 2545 }, { "epoch": 2.91, "grad_norm": 0.9961232909655852, "learning_rate": 0.0002355350496627053, "loss": 4.3485, "step": 2546 }, { "epoch": 2.91, "grad_norm": 0.5658671883088607, "learning_rate": 0.0002353181309187909, "loss": 4.3091, "step": 2547 }, { "epoch": 2.91, "grad_norm": 0.7423701182339253, "learning_rate": 0.0002351012476346716, "loss": 4.4892, "step": 2548 }, { "epoch": 2.91, "grad_norm": 1.038336311941712, "learning_rate": 0.00023488439992924704, "loss": 4.2157, "step": 2549 }, { "epoch": 2.91, "grad_norm": 0.6920914073037782, "learning_rate": 0.00023466758792139696, "loss": 4.2824, "step": 2550 }, { "epoch": 2.92, "grad_norm": 0.7356411953829696, "learning_rate": 0.00023445081172998193, "loss": 4.2419, "step": 2551 }, { "epoch": 2.92, "grad_norm": 0.6502002009118366, "learning_rate": 0.00023423407147384233, "loss": 4.1811, "step": 2552 }, { "epoch": 2.92, "grad_norm": 0.6672266588624904, "learning_rate": 0.00023401736727179936, "loss": 4.1375, "step": 2553 }, { "epoch": 2.92, "grad_norm": 0.577132639609706, "learning_rate": 0.00023380069924265433, "loss": 4.1765, "step": 2554 }, { "epoch": 2.92, "grad_norm": 1.262287404435852, "learning_rate": 0.00023358406750518865, "loss": 4.3358, "step": 2555 }, { "epoch": 2.92, "grad_norm": 0.5813001231482584, "learning_rate": 0.00023336747217816354, "loss": 4.0951, "step": 2556 }, { "epoch": 2.92, "grad_norm": 0.8142413040340654, "learning_rate": 0.00023315091338032087, "loss": 4.2364, "step": 2557 }, { "epoch": 2.92, "grad_norm": 0.6536407916309178, "learning_rate": 0.0002329343912303821, "loss": 4.3941, "step": 2558 }, { "epoch": 2.92, "grad_norm": 0.8187149026569829, "learning_rate": 0.00023271790584704873, "loss": 4.2477, "step": 2559 }, { "epoch": 2.93, 
"grad_norm": 0.51777212786076, "learning_rate": 0.00023250145734900185, "loss": 4.1844, "step": 2560 }, { "epoch": 2.93, "grad_norm": 0.7599364493390554, "learning_rate": 0.00023228504585490268, "loss": 3.9695, "step": 2561 }, { "epoch": 2.93, "grad_norm": 1.008857826506059, "learning_rate": 0.000232068671483392, "loss": 4.2541, "step": 2562 }, { "epoch": 2.93, "grad_norm": 0.9374779660914048, "learning_rate": 0.0002318523343530902, "loss": 4.3179, "step": 2563 }, { "epoch": 2.93, "grad_norm": 1.806498926719277, "learning_rate": 0.00023163603458259725, "loss": 4.2933, "step": 2564 }, { "epoch": 2.93, "grad_norm": 1.0956353073645593, "learning_rate": 0.00023141977229049272, "loss": 4.5247, "step": 2565 }, { "epoch": 2.93, "grad_norm": 0.9079718685868076, "learning_rate": 0.0002312035475953356, "loss": 4.2378, "step": 2566 }, { "epoch": 2.93, "grad_norm": 0.46063949434249796, "learning_rate": 0.00023098736061566422, "loss": 4.042, "step": 2567 }, { "epoch": 2.93, "grad_norm": 1.3294150048550546, "learning_rate": 0.0002307712114699964, "loss": 4.0306, "step": 2568 }, { "epoch": 2.94, "grad_norm": 1.3650402565694655, "learning_rate": 0.0002305551002768289, "loss": 4.2429, "step": 2569 }, { "epoch": 2.94, "grad_norm": 0.6161399296902454, "learning_rate": 0.00023033902715463803, "loss": 4.2256, "step": 2570 }, { "epoch": 2.94, "grad_norm": 1.9331703347458324, "learning_rate": 0.00023012299222187896, "loss": 4.3089, "step": 2571 }, { "epoch": 2.94, "grad_norm": 1.146545007594941, "learning_rate": 0.00022990699559698614, "loss": 4.0443, "step": 2572 }, { "epoch": 2.94, "grad_norm": 1.1751121481909939, "learning_rate": 0.0002296910373983728, "loss": 4.2889, "step": 2573 }, { "epoch": 2.94, "grad_norm": 1.9203205725349273, "learning_rate": 0.00022947511774443126, "loss": 4.2464, "step": 2574 }, { "epoch": 2.94, "grad_norm": 0.6017249705186978, "learning_rate": 0.00022925923675353263, "loss": 4.2374, "step": 2575 }, { "epoch": 2.94, "grad_norm": 1.6875214362325415, "learning_rate": 0.00022904339454402693, "loss": 4.2171, "step": 2576 }, { "epoch": 2.95, "grad_norm": 1.3721523367939834, "learning_rate": 0.00022882759123424277, "loss": 4.2348, "step": 2577 }, { "epoch": 2.95, "grad_norm": 0.6422949860422037, "learning_rate": 0.00022861182694248753, "loss": 4.0803, "step": 2578 }, { "epoch": 2.95, "grad_norm": 1.286854686785068, "learning_rate": 0.0002283961017870472, "loss": 4.3155, "step": 2579 }, { "epoch": 2.95, "grad_norm": 2.2035783891425007, "learning_rate": 0.00022818041588618636, "loss": 4.2553, "step": 2580 }, { "epoch": 2.95, "grad_norm": 0.699275702023369, "learning_rate": 0.00022796476935814784, "loss": 4.0383, "step": 2581 }, { "epoch": 2.95, "grad_norm": 1.3587234727586166, "learning_rate": 0.00022774916232115312, "loss": 4.2491, "step": 2582 }, { "epoch": 2.95, "grad_norm": 1.501453398333668, "learning_rate": 0.000227533594893402, "loss": 4.3552, "step": 2583 }, { "epoch": 2.95, "grad_norm": 12.560714816003086, "learning_rate": 0.00022731806719307246, "loss": 4.3116, "step": 2584 }, { "epoch": 2.95, "grad_norm": 4.302987805636268, "learning_rate": 0.00022710257933832094, "loss": 4.6409, "step": 2585 }, { "epoch": 2.96, "grad_norm": 3.523318009111459, "learning_rate": 0.00022688713144728163, "loss": 4.4539, "step": 2586 }, { "epoch": 2.96, "grad_norm": 1.447460117349026, "learning_rate": 0.00022667172363806718, "loss": 4.6503, "step": 2587 }, { "epoch": 2.96, "grad_norm": 1.9855463787455052, "learning_rate": 0.000226456356028768, "loss": 4.7454, "step": 2588 }, { "epoch": 2.96, 
"grad_norm": 3.529293789336613, "learning_rate": 0.00022624102873745287, "loss": 4.4759, "step": 2589 }, { "epoch": 2.96, "grad_norm": 1.334558072473303, "learning_rate": 0.00022602574188216787, "loss": 4.3868, "step": 2590 }, { "epoch": 2.96, "grad_norm": 1.9607808878570054, "learning_rate": 0.00022581049558093735, "loss": 4.3096, "step": 2591 }, { "epoch": 2.96, "grad_norm": 3.168540936643506, "learning_rate": 0.00022559528995176324, "loss": 4.3386, "step": 2592 }, { "epoch": 2.96, "grad_norm": 1.1514301601909844, "learning_rate": 0.00022538012511262554, "loss": 4.3142, "step": 2593 }, { "epoch": 2.96, "grad_norm": 3.0339426434520007, "learning_rate": 0.00022516500118148125, "loss": 4.3733, "step": 2594 }, { "epoch": 2.97, "grad_norm": 1.6850221192288715, "learning_rate": 0.0002249499182762653, "loss": 4.2383, "step": 2595 }, { "epoch": 2.97, "grad_norm": 1.3541074559284516, "learning_rate": 0.0002247348765148903, "loss": 4.2689, "step": 2596 }, { "epoch": 2.97, "grad_norm": 1.872179566716467, "learning_rate": 0.00022451987601524614, "loss": 4.453, "step": 2597 }, { "epoch": 2.97, "grad_norm": 1.4706715133882597, "learning_rate": 0.00022430491689519986, "loss": 4.033, "step": 2598 }, { "epoch": 2.97, "grad_norm": 1.1450198748905875, "learning_rate": 0.00022408999927259597, "loss": 4.3444, "step": 2599 }, { "epoch": 2.97, "grad_norm": 1.0701641251360345, "learning_rate": 0.00022387512326525654, "loss": 4.2313, "step": 2600 }, { "epoch": 2.97, "eval_blimp_filtered_avg": 0.5455223880597015, "eval_blimp_filtered_std": 0.005564077161698807, "step": 2600 }, { "epoch": 2.97, "eval_blimp_supplement_avg": 0.5387931034482759, "eval_blimp_supplement_std": 0.0227509399822816, "step": 2600 }, { "epoch": 2.97, "eval_vqa_filtered_avg": 0.37, "eval_vqa_filtered_std": 0.048523658709391, "step": 2600 }, { "epoch": 2.97, "eval_winoground_filtered_avg": 0.48, "eval_winoground_filtered_std": 0.05021167315686779, "step": 2600 }, { "epoch": 2.97, "grad_norm": 0.8727229357878289, "learning_rate": 0.0002236602889909804, "loss": 4.5116, "step": 2601 }, { "epoch": 2.97, "grad_norm": 0.9244721871122329, "learning_rate": 0.00022344549656754388, "loss": 4.1678, "step": 2602 }, { "epoch": 2.97, "grad_norm": 0.9385567240041308, "learning_rate": 0.0002232307461126998, "loss": 3.9222, "step": 2603 }, { "epoch": 2.98, "grad_norm": 1.5224162654985514, "learning_rate": 0.00022301603774417856, "loss": 4.228, "step": 2604 }, { "epoch": 2.98, "grad_norm": 1.0765719130203972, "learning_rate": 0.0002228013715796872, "loss": 4.1071, "step": 2605 }, { "epoch": 2.98, "grad_norm": 0.7413854552588149, "learning_rate": 0.0002225867477369097, "loss": 4.2614, "step": 2606 }, { "epoch": 2.98, "grad_norm": 1.1346633351640534, "learning_rate": 0.0002223721663335068, "loss": 4.1651, "step": 2607 }, { "epoch": 2.98, "grad_norm": 1.0560600309340842, "learning_rate": 0.00022215762748711593, "loss": 3.9808, "step": 2608 }, { "epoch": 2.98, "grad_norm": 1.1497443992074179, "learning_rate": 0.00022194313131535133, "loss": 4.3269, "step": 2609 }, { "epoch": 2.98, "grad_norm": 1.4722261049234469, "learning_rate": 0.00022172867793580374, "loss": 4.2138, "step": 2610 }, { "epoch": 2.98, "grad_norm": 1.5910058756181615, "learning_rate": 0.00022151426746604044, "loss": 4.3151, "step": 2611 }, { "epoch": 2.99, "grad_norm": 0.8856523009462958, "learning_rate": 0.00022129990002360523, "loss": 4.2509, "step": 2612 }, { "epoch": 2.99, "grad_norm": 1.5227832889412158, "learning_rate": 0.0002210855757260183, "loss": 4.257, "step": 2613 }, { "epoch": 2.99, 
"grad_norm": 1.4809934748197426, "learning_rate": 0.00022087129469077624, "loss": 4.2507, "step": 2614 }, { "epoch": 2.99, "grad_norm": 1.194483785462596, "learning_rate": 0.00022065705703535175, "loss": 4.2309, "step": 2615 }, { "epoch": 2.99, "grad_norm": 1.901556622663321, "learning_rate": 0.00022044286287719393, "loss": 4.4746, "step": 2616 }, { "epoch": 2.99, "grad_norm": 0.9241563786779986, "learning_rate": 0.00022022871233372797, "loss": 4.2084, "step": 2617 }, { "epoch": 2.99, "grad_norm": 1.3324927865244371, "learning_rate": 0.00022001460552235517, "loss": 4.3767, "step": 2618 }, { "epoch": 2.99, "grad_norm": 0.8992921493918756, "learning_rate": 0.00021980054256045292, "loss": 4.3004, "step": 2619 }, { "epoch": 2.99, "grad_norm": 1.1697682318310916, "learning_rate": 0.00021958652356537426, "loss": 4.3022, "step": 2620 }, { "epoch": 3.0, "grad_norm": 1.4984140062763884, "learning_rate": 0.00021937254865444855, "loss": 4.2635, "step": 2621 }, { "epoch": 3.0, "grad_norm": 0.7280864180126836, "learning_rate": 0.00021915861794498073, "loss": 4.446, "step": 2622 }, { "epoch": 3.0, "grad_norm": 1.0087830658007348, "learning_rate": 0.00021894473155425166, "loss": 4.2718, "step": 2623 }, { "epoch": 3.0, "grad_norm": 0.9629618155233188, "learning_rate": 0.0002187308895995176, "loss": 4.2258, "step": 2624 }, { "epoch": 3.0, "grad_norm": 0.7353616869265273, "learning_rate": 0.00021851709219801078, "loss": 4.2871, "step": 2625 }, { "epoch": 3.0, "grad_norm": 0.7495729052643275, "learning_rate": 0.0002183033394669389, "loss": 4.0758, "step": 2626 }, { "epoch": 3.0, "grad_norm": 1.7158592969252013, "learning_rate": 0.00021808963152348518, "loss": 3.9756, "step": 2627 }, { "epoch": 3.0, "grad_norm": 0.5638799552630359, "learning_rate": 0.00021787596848480816, "loss": 4.2823, "step": 2628 }, { "epoch": 3.0, "grad_norm": 1.7890001682828334, "learning_rate": 0.00021766235046804193, "loss": 4.0262, "step": 2629 }, { "epoch": 3.01, "grad_norm": 0.4942836086737641, "learning_rate": 0.00021744877759029583, "loss": 3.9777, "step": 2630 }, { "epoch": 3.01, "grad_norm": 0.8135541961657559, "learning_rate": 0.00021723524996865448, "loss": 4.1506, "step": 2631 }, { "epoch": 3.01, "grad_norm": 1.1664684631955407, "learning_rate": 0.00021702176772017757, "loss": 4.2646, "step": 2632 }, { "epoch": 3.01, "grad_norm": 0.8293682398811614, "learning_rate": 0.00021680833096190009, "loss": 4.0576, "step": 2633 }, { "epoch": 3.01, "grad_norm": 0.4927010229853246, "learning_rate": 0.00021659493981083201, "loss": 4.0795, "step": 2634 }, { "epoch": 3.01, "grad_norm": 0.6424839655986803, "learning_rate": 0.0002163815943839583, "loss": 4.2375, "step": 2635 }, { "epoch": 3.01, "grad_norm": 2.108226246900263, "learning_rate": 0.0002161682947982389, "loss": 4.2053, "step": 2636 }, { "epoch": 3.01, "grad_norm": 0.792232717440733, "learning_rate": 0.00021595504117060844, "loss": 4.2079, "step": 2637 }, { "epoch": 3.01, "grad_norm": 1.2774698437159082, "learning_rate": 0.00021574183361797652, "loss": 4.2522, "step": 2638 }, { "epoch": 3.02, "grad_norm": 1.4542567917906226, "learning_rate": 0.00021552867225722763, "loss": 4.1341, "step": 2639 }, { "epoch": 3.02, "grad_norm": 0.6800197798831682, "learning_rate": 0.0002153155572052208, "loss": 4.3572, "step": 2640 }, { "epoch": 3.02, "grad_norm": 1.4070728368563141, "learning_rate": 0.00021510248857878938, "loss": 4.0916, "step": 2641 }, { "epoch": 3.02, "grad_norm": 1.3117404605045788, "learning_rate": 0.00021488946649474153, "loss": 4.3946, "step": 2642 }, { "epoch": 3.02, 
"grad_norm": 0.7358254102278927, "learning_rate": 0.0002146764910698601, "loss": 4.1967, "step": 2643 }, { "epoch": 3.02, "grad_norm": 1.214865216623838, "learning_rate": 0.0002144635624209022, "loss": 4.0662, "step": 2644 }, { "epoch": 3.02, "grad_norm": 1.6759751989953697, "learning_rate": 0.00021425068066459896, "loss": 4.2703, "step": 2645 }, { "epoch": 3.02, "grad_norm": 0.6881551204194081, "learning_rate": 0.00021403784591765616, "loss": 4.0712, "step": 2646 }, { "epoch": 3.03, "grad_norm": 1.2131652979686258, "learning_rate": 0.00021382505829675388, "loss": 4.3338, "step": 2647 }, { "epoch": 3.03, "grad_norm": 2.039979036730362, "learning_rate": 0.00021361231791854635, "loss": 4.3175, "step": 2648 }, { "epoch": 3.03, "grad_norm": 0.8060956063803566, "learning_rate": 0.00021339962489966136, "loss": 4.2945, "step": 2649 }, { "epoch": 3.03, "grad_norm": 1.0289587912320695, "learning_rate": 0.0002131869793567014, "loss": 4.1901, "step": 2650 }, { "epoch": 3.03, "grad_norm": 2.1665868182128447, "learning_rate": 0.00021297438140624266, "loss": 4.1961, "step": 2651 }, { "epoch": 3.03, "grad_norm": 1.3611300667508444, "learning_rate": 0.00021276183116483542, "loss": 4.2938, "step": 2652 }, { "epoch": 3.03, "grad_norm": 0.6176308881777606, "learning_rate": 0.00021254932874900325, "loss": 4.3531, "step": 2653 }, { "epoch": 3.03, "grad_norm": 1.4881255043094301, "learning_rate": 0.00021233687427524428, "loss": 4.2239, "step": 2654 }, { "epoch": 3.03, "grad_norm": 1.5261693157953908, "learning_rate": 0.00021212446786002977, "loss": 4.3056, "step": 2655 }, { "epoch": 3.04, "grad_norm": 0.7860846878830503, "learning_rate": 0.00021191210961980493, "loss": 4.15, "step": 2656 }, { "epoch": 3.04, "grad_norm": 1.4787732598354335, "learning_rate": 0.0002116997996709885, "loss": 4.0094, "step": 2657 }, { "epoch": 3.04, "grad_norm": 1.4980757634326116, "learning_rate": 0.0002114875381299726, "loss": 4.0987, "step": 2658 }, { "epoch": 3.04, "grad_norm": 0.9545345050100846, "learning_rate": 0.00021127532511312297, "loss": 4.3616, "step": 2659 }, { "epoch": 3.04, "grad_norm": 2.329289846801104, "learning_rate": 0.00021106316073677875, "loss": 4.0631, "step": 2660 }, { "epoch": 3.04, "grad_norm": 2.065529521756144, "learning_rate": 0.00021085104511725243, "loss": 4.1527, "step": 2661 }, { "epoch": 3.04, "grad_norm": 0.8745820558575126, "learning_rate": 0.00021063897837082947, "loss": 4.1326, "step": 2662 }, { "epoch": 3.04, "grad_norm": 1.838569962989226, "learning_rate": 0.00021042696061376897, "loss": 4.2325, "step": 2663 }, { "epoch": 3.04, "grad_norm": 0.9027832054836259, "learning_rate": 0.00021021499196230292, "loss": 4.2452, "step": 2664 }, { "epoch": 3.05, "grad_norm": 2.004640639830266, "learning_rate": 0.00021000307253263656, "loss": 4.1976, "step": 2665 }, { "epoch": 3.05, "grad_norm": 1.0051871063020932, "learning_rate": 0.00020979120244094787, "loss": 4.3394, "step": 2666 }, { "epoch": 3.05, "grad_norm": 1.6446721157067745, "learning_rate": 0.00020957938180338803, "loss": 4.0812, "step": 2667 }, { "epoch": 3.05, "grad_norm": 0.9178515402890869, "learning_rate": 0.000209367610736081, "loss": 3.9515, "step": 2668 }, { "epoch": 3.05, "grad_norm": 1.0501235274774758, "learning_rate": 0.00020915588935512363, "loss": 4.1962, "step": 2669 }, { "epoch": 3.05, "grad_norm": 2.0750672473350504, "learning_rate": 0.00020894421777658544, "loss": 4.0438, "step": 2670 }, { "epoch": 3.05, "grad_norm": 1.9479414065952017, "learning_rate": 0.00020873259611650874, "loss": 3.9567, "step": 2671 }, { "epoch": 3.05, 
"grad_norm": 1.811164421476797, "learning_rate": 0.00020852102449090838, "loss": 4.0245, "step": 2672 }, { "epoch": 3.05, "grad_norm": 1.5447498867380527, "learning_rate": 0.0002083095030157719, "loss": 4.3063, "step": 2673 }, { "epoch": 3.06, "grad_norm": 1.0998796403756723, "learning_rate": 0.00020809803180705928, "loss": 4.3357, "step": 2674 }, { "epoch": 3.06, "grad_norm": 1.1688581819045187, "learning_rate": 0.00020788661098070286, "loss": 4.2906, "step": 2675 }, { "epoch": 3.06, "grad_norm": 1.6925154779110498, "learning_rate": 0.0002076752406526075, "loss": 4.2833, "step": 2676 }, { "epoch": 3.06, "grad_norm": 2.2082989049842885, "learning_rate": 0.0002074639209386503, "loss": 4.0648, "step": 2677 }, { "epoch": 3.06, "grad_norm": 2.1793976202979435, "learning_rate": 0.00020725265195468073, "loss": 4.1357, "step": 2678 }, { "epoch": 3.06, "grad_norm": 1.5134876071321495, "learning_rate": 0.00020704143381652015, "loss": 3.9925, "step": 2679 }, { "epoch": 3.06, "grad_norm": 1.3598460313990763, "learning_rate": 0.00020683026663996238, "loss": 4.2257, "step": 2680 }, { "epoch": 3.06, "grad_norm": 0.9858963700981123, "learning_rate": 0.00020661915054077316, "loss": 4.0519, "step": 2681 }, { "epoch": 3.07, "grad_norm": 1.178321340340512, "learning_rate": 0.00020640808563469022, "loss": 4.214, "step": 2682 }, { "epoch": 3.07, "grad_norm": 1.7172981860105319, "learning_rate": 0.0002061970720374232, "loss": 4.3686, "step": 2683 }, { "epoch": 3.07, "grad_norm": 1.0477243872266038, "learning_rate": 0.0002059861098646537, "loss": 4.1729, "step": 2684 }, { "epoch": 3.07, "grad_norm": 2.6117109457857097, "learning_rate": 0.00020577519923203498, "loss": 4.2265, "step": 2685 }, { "epoch": 3.07, "grad_norm": 2.698562561026729, "learning_rate": 0.00020556434025519242, "loss": 4.2613, "step": 2686 }, { "epoch": 3.07, "grad_norm": 3.1708653692198214, "learning_rate": 0.00020535353304972247, "loss": 4.2421, "step": 2687 }, { "epoch": 3.07, "grad_norm": 1.9094911589561623, "learning_rate": 0.0002051427777311937, "loss": 4.2907, "step": 2688 }, { "epoch": 3.07, "grad_norm": 4.570019846548903, "learning_rate": 0.00020493207441514598, "loss": 4.2938, "step": 2689 }, { "epoch": 3.07, "grad_norm": 1.3031766399729952, "learning_rate": 0.00020472142321709087, "loss": 4.2353, "step": 2690 }, { "epoch": 3.08, "grad_norm": 2.0044634562762234, "learning_rate": 0.00020451082425251136, "loss": 4.0893, "step": 2691 }, { "epoch": 3.08, "grad_norm": 4.039965494960537, "learning_rate": 0.00020430027763686137, "loss": 4.2877, "step": 2692 }, { "epoch": 3.08, "grad_norm": 2.4430676462275334, "learning_rate": 0.0002040897834855665, "loss": 4.321, "step": 2693 }, { "epoch": 3.08, "grad_norm": 1.5555970763940636, "learning_rate": 0.0002038793419140238, "loss": 4.185, "step": 2694 }, { "epoch": 3.08, "grad_norm": 2.8700582176254485, "learning_rate": 0.0002036689530376011, "loss": 4.2749, "step": 2695 }, { "epoch": 3.08, "grad_norm": 3.247703315247495, "learning_rate": 0.0002034586169716373, "loss": 4.464, "step": 2696 }, { "epoch": 3.08, "grad_norm": 2.6545343780434347, "learning_rate": 0.0002032483338314427, "loss": 4.2278, "step": 2697 }, { "epoch": 3.08, "grad_norm": 2.3617182896214204, "learning_rate": 0.0002030381037322983, "loss": 4.2908, "step": 2698 }, { "epoch": 3.08, "grad_norm": 1.8786949773119794, "learning_rate": 0.00020282792678945636, "loss": 4.206, "step": 2699 }, { "epoch": 3.09, "grad_norm": 2.542431933100143, "learning_rate": 0.00020261780311813936, "loss": 4.1357, "step": 2700 }, { "epoch": 3.09, 
"eval_blimp_filtered_avg": 0.5353731343283582, "eval_blimp_filtered_std": 0.0056318479878109074, "step": 2700 }, { "epoch": 3.09, "eval_blimp_supplement_avg": 0.5258620689655172, "eval_blimp_supplement_std": 0.022937658138571546, "step": 2700 }, { "epoch": 3.09, "eval_vqa_filtered_avg": 0.33, "eval_vqa_filtered_std": 0.047258156262526045, "step": 2700 }, { "epoch": 3.09, "eval_winoground_filtered_avg": 0.49, "eval_winoground_filtered_std": 0.05024183937956912, "step": 2700 }, { "epoch": 3.09, "grad_norm": 0.7292275735702843, "learning_rate": 0.00020240773283354123, "loss": 4.1754, "step": 2701 }, { "epoch": 3.09, "grad_norm": 2.6074000685821312, "learning_rate": 0.00020219771605082634, "loss": 4.1111, "step": 2702 }, { "epoch": 3.09, "grad_norm": 1.993304006344121, "learning_rate": 0.0002019877528851299, "loss": 4.4819, "step": 2703 }, { "epoch": 3.09, "grad_norm": 1.6207996773244095, "learning_rate": 0.00020177784345155728, "loss": 4.2965, "step": 2704 }, { "epoch": 3.09, "grad_norm": 0.5531651560433702, "learning_rate": 0.00020156798786518496, "loss": 4.265, "step": 2705 }, { "epoch": 3.09, "grad_norm": 1.8497052989198064, "learning_rate": 0.00020135818624105958, "loss": 4.2033, "step": 2706 }, { "epoch": 3.09, "grad_norm": 1.488630127697065, "learning_rate": 0.00020114843869419824, "loss": 4.2435, "step": 2707 }, { "epoch": 3.09, "grad_norm": 2.0388096750792433, "learning_rate": 0.00020093874533958853, "loss": 4.3619, "step": 2708 }, { "epoch": 3.1, "grad_norm": 0.9224294577437181, "learning_rate": 0.00020072910629218807, "loss": 4.1371, "step": 2709 }, { "epoch": 3.1, "grad_norm": 3.002375925776754, "learning_rate": 0.00020051952166692493, "loss": 4.0652, "step": 2710 }, { "epoch": 3.1, "grad_norm": 2.6158857213290725, "learning_rate": 0.00020030999157869724, "loss": 4.4687, "step": 2711 }, { "epoch": 3.1, "grad_norm": 0.7934475196892388, "learning_rate": 0.00020010051614237335, "loss": 4.2267, "step": 2712 }, { "epoch": 3.1, "grad_norm": 0.9783242840319151, "learning_rate": 0.00019989109547279143, "loss": 4.3585, "step": 2713 }, { "epoch": 3.1, "grad_norm": 2.050480858390498, "learning_rate": 0.00019968172968475985, "loss": 4.4526, "step": 2714 }, { "epoch": 3.1, "grad_norm": 0.9919909558947734, "learning_rate": 0.00019947241889305676, "loss": 4.2746, "step": 2715 }, { "epoch": 3.1, "grad_norm": 0.9857840157863261, "learning_rate": 0.00019926316321243029, "loss": 4.2104, "step": 2716 }, { "epoch": 3.11, "grad_norm": 0.5760178940515555, "learning_rate": 0.00019905396275759815, "loss": 4.2138, "step": 2717 }, { "epoch": 3.11, "grad_norm": 0.9199717972661715, "learning_rate": 0.00019884481764324794, "loss": 4.4121, "step": 2718 }, { "epoch": 3.11, "grad_norm": 1.316906455578091, "learning_rate": 0.00019863572798403683, "loss": 4.1376, "step": 2719 }, { "epoch": 3.11, "grad_norm": 3.6629572339659133, "learning_rate": 0.0001984266938945918, "loss": 4.069, "step": 2720 }, { "epoch": 3.11, "grad_norm": 1.0645284802737562, "learning_rate": 0.00019821771548950902, "loss": 4.1634, "step": 2721 }, { "epoch": 3.11, "grad_norm": 0.9629434886401738, "learning_rate": 0.00019800879288335433, "loss": 4.376, "step": 2722 }, { "epoch": 3.11, "grad_norm": 1.360000287660664, "learning_rate": 0.000197799926190663, "loss": 3.9352, "step": 2723 }, { "epoch": 3.11, "grad_norm": 0.8424103133351177, "learning_rate": 0.00019759111552593962, "loss": 4.1944, "step": 2724 }, { "epoch": 3.11, "grad_norm": 1.1651973218605336, "learning_rate": 0.00019738236100365813, "loss": 4.3105, "step": 2725 }, { "epoch": 3.12, 
"grad_norm": 1.1297791010771425, "learning_rate": 0.00019717366273826146, "loss": 4.2934, "step": 2726 }, { "epoch": 3.12, "grad_norm": 2.4291880833848025, "learning_rate": 0.0001969650208441619, "loss": 4.1806, "step": 2727 }, { "epoch": 3.12, "grad_norm": 1.2374885655487293, "learning_rate": 0.0001967564354357408, "loss": 4.1317, "step": 2728 }, { "epoch": 3.12, "grad_norm": 0.8721553688523321, "learning_rate": 0.00019654790662734862, "loss": 4.4364, "step": 2729 }, { "epoch": 3.12, "grad_norm": 0.822995933702198, "learning_rate": 0.0001963394345333046, "loss": 4.3738, "step": 2730 }, { "epoch": 3.12, "grad_norm": 0.8880750647582601, "learning_rate": 0.00019613101926789698, "loss": 4.2323, "step": 2731 }, { "epoch": 3.12, "grad_norm": 0.7334582507362306, "learning_rate": 0.00019592266094538283, "loss": 4.2155, "step": 2732 }, { "epoch": 3.12, "grad_norm": 1.3524743414472449, "learning_rate": 0.00019571435967998828, "loss": 4.2045, "step": 2733 }, { "epoch": 3.12, "grad_norm": 0.6546135611244575, "learning_rate": 0.00019550611558590765, "loss": 4.2247, "step": 2734 }, { "epoch": 3.13, "grad_norm": 0.8353773935104888, "learning_rate": 0.00019529792877730426, "loss": 4.2016, "step": 2735 }, { "epoch": 3.13, "grad_norm": 0.9534438873953378, "learning_rate": 0.00019508979936830995, "loss": 4.221, "step": 2736 }, { "epoch": 3.13, "grad_norm": 0.7253657811979985, "learning_rate": 0.0001948817274730254, "loss": 4.4386, "step": 2737 }, { "epoch": 3.13, "grad_norm": 0.8620214751918487, "learning_rate": 0.00019467371320551903, "loss": 4.2381, "step": 2738 }, { "epoch": 3.13, "grad_norm": 0.8938925562649608, "learning_rate": 0.0001944657566798283, "loss": 4.2226, "step": 2739 }, { "epoch": 3.13, "grad_norm": 0.9793237651848, "learning_rate": 0.00019425785800995877, "loss": 4.163, "step": 2740 }, { "epoch": 3.13, "grad_norm": 0.6287904392424756, "learning_rate": 0.0001940500173098845, "loss": 4.0035, "step": 2741 }, { "epoch": 3.13, "grad_norm": 0.8996685690407565, "learning_rate": 0.00019384223469354742, "loss": 4.4299, "step": 2742 }, { "epoch": 3.13, "grad_norm": 2.6857428303972153, "learning_rate": 0.00019363451027485778, "loss": 4.3645, "step": 2743 }, { "epoch": 3.14, "grad_norm": 2.2128391034201176, "learning_rate": 0.00019342684416769404, "loss": 4.1948, "step": 2744 }, { "epoch": 3.14, "grad_norm": 1.4568577274529326, "learning_rate": 0.00019321923648590262, "loss": 4.0352, "step": 2745 }, { "epoch": 3.14, "grad_norm": 0.7772223415659458, "learning_rate": 0.00019301168734329793, "loss": 4.2246, "step": 2746 }, { "epoch": 3.14, "grad_norm": 1.014222072539158, "learning_rate": 0.00019280419685366193, "loss": 4.0578, "step": 2747 }, { "epoch": 3.14, "grad_norm": 1.3241057053060967, "learning_rate": 0.00019259676513074501, "loss": 4.2741, "step": 2748 }, { "epoch": 3.14, "grad_norm": 1.176105843541019, "learning_rate": 0.0001923893922882651, "loss": 4.2011, "step": 2749 }, { "epoch": 3.14, "grad_norm": 1.4545692757015989, "learning_rate": 0.00019218207843990778, "loss": 4.078, "step": 2750 }, { "epoch": 3.14, "grad_norm": 2.769358037826632, "learning_rate": 0.00019197482369932614, "loss": 4.317, "step": 2751 }, { "epoch": 3.15, "grad_norm": 0.9663605095477744, "learning_rate": 0.00019176762818014125, "loss": 4.2099, "step": 2752 }, { "epoch": 3.15, "grad_norm": 2.8197785374562043, "learning_rate": 0.00019156049199594153, "loss": 4.345, "step": 2753 }, { "epoch": 3.15, "grad_norm": 2.205186944227695, "learning_rate": 0.0001913534152602828, "loss": 4.2379, "step": 2754 }, { "epoch": 3.15, 
"grad_norm": 0.9675083150671928, "learning_rate": 0.00019114639808668834, "loss": 4.2494, "step": 2755 }, { "epoch": 3.15, "grad_norm": 2.801212887122284, "learning_rate": 0.00019093944058864879, "loss": 4.3394, "step": 2756 }, { "epoch": 3.15, "grad_norm": 1.5755646295888077, "learning_rate": 0.0001907325428796221, "loss": 4.1915, "step": 2757 }, { "epoch": 3.15, "grad_norm": 1.867687368817391, "learning_rate": 0.00019052570507303344, "loss": 4.1686, "step": 2758 }, { "epoch": 3.15, "grad_norm": 3.870990645689623, "learning_rate": 0.00019031892728227503, "loss": 4.0606, "step": 2759 }, { "epoch": 3.15, "grad_norm": 1.024705655299172, "learning_rate": 0.00019011220962070633, "loss": 4.1624, "step": 2760 }, { "epoch": 3.16, "grad_norm": 1.7441593562682862, "learning_rate": 0.00018990555220165377, "loss": 4.3822, "step": 2761 }, { "epoch": 3.16, "grad_norm": 4.759590525575753, "learning_rate": 0.00018969895513841074, "loss": 4.1238, "step": 2762 }, { "epoch": 3.16, "grad_norm": 0.9968501678298581, "learning_rate": 0.00018949241854423771, "loss": 4.235, "step": 2763 }, { "epoch": 3.16, "grad_norm": 1.451454375080533, "learning_rate": 0.00018928594253236165, "loss": 4.3202, "step": 2764 }, { "epoch": 3.16, "grad_norm": 1.3257194039653943, "learning_rate": 0.00018907952721597662, "loss": 4.3438, "step": 2765 }, { "epoch": 3.16, "grad_norm": 2.1374024861455907, "learning_rate": 0.00018887317270824326, "loss": 4.2327, "step": 2766 }, { "epoch": 3.16, "grad_norm": 1.275263829009234, "learning_rate": 0.00018866687912228906, "loss": 4.1781, "step": 2767 }, { "epoch": 3.16, "grad_norm": 1.06338026796848, "learning_rate": 0.00018846064657120778, "loss": 4.1637, "step": 2768 }, { "epoch": 3.16, "grad_norm": 0.9782036180312333, "learning_rate": 0.00018825447516806, "loss": 4.2968, "step": 2769 }, { "epoch": 3.17, "grad_norm": 2.7796977175907625, "learning_rate": 0.00018804836502587277, "loss": 4.2908, "step": 2770 }, { "epoch": 3.17, "grad_norm": 1.3630411955029582, "learning_rate": 0.00018784231625763943, "loss": 4.1112, "step": 2771 }, { "epoch": 3.17, "grad_norm": 1.934893774875886, "learning_rate": 0.00018763632897631963, "loss": 4.2957, "step": 2772 }, { "epoch": 3.17, "grad_norm": 2.4674779980346835, "learning_rate": 0.00018743040329483946, "loss": 4.4524, "step": 2773 }, { "epoch": 3.17, "grad_norm": 1.6088177691005527, "learning_rate": 0.00018722453932609121, "loss": 4.0494, "step": 2774 }, { "epoch": 3.17, "grad_norm": 1.2479713377836712, "learning_rate": 0.00018701873718293333, "loss": 4.1084, "step": 2775 }, { "epoch": 3.17, "grad_norm": 0.6510558887609793, "learning_rate": 0.00018681299697819028, "loss": 4.4719, "step": 2776 }, { "epoch": 3.17, "grad_norm": 3.6068418026326805, "learning_rate": 0.00018660731882465267, "loss": 4.2634, "step": 2777 }, { "epoch": 3.17, "grad_norm": 1.814194776720197, "learning_rate": 0.00018640170283507705, "loss": 4.2141, "step": 2778 }, { "epoch": 3.18, "grad_norm": 1.439611143975839, "learning_rate": 0.00018619614912218584, "loss": 4.2321, "step": 2779 }, { "epoch": 3.18, "grad_norm": 1.3044665021327235, "learning_rate": 0.0001859906577986677, "loss": 4.1679, "step": 2780 }, { "epoch": 3.18, "grad_norm": 0.8740949296319453, "learning_rate": 0.00018578522897717636, "loss": 4.1068, "step": 2781 }, { "epoch": 3.18, "grad_norm": 4.139889055194011, "learning_rate": 0.0001855798627703319, "loss": 4.2989, "step": 2782 }, { "epoch": 3.18, "grad_norm": 1.703480693619845, "learning_rate": 0.00018537455929071975, "loss": 4.2224, "step": 2783 }, { "epoch": 3.18, 
"grad_norm": 1.9046471578387805, "learning_rate": 0.00018516931865089135, "loss": 4.0958, "step": 2784 }, { "epoch": 3.18, "grad_norm": 2.0730089257085496, "learning_rate": 0.00018496414096336308, "loss": 4.0995, "step": 2785 }, { "epoch": 3.18, "grad_norm": 1.1309095981747728, "learning_rate": 0.00018475902634061724, "loss": 4.4566, "step": 2786 }, { "epoch": 3.19, "grad_norm": 3.5848310589617807, "learning_rate": 0.00018455397489510145, "loss": 4.1497, "step": 2787 }, { "epoch": 3.19, "grad_norm": 1.9759554264190222, "learning_rate": 0.0001843489867392289, "loss": 4.2198, "step": 2788 }, { "epoch": 3.19, "grad_norm": 1.174977086817324, "learning_rate": 0.00018414406198537762, "loss": 4.1557, "step": 2789 }, { "epoch": 3.19, "grad_norm": 1.4517393729020691, "learning_rate": 0.00018393920074589114, "loss": 4.1268, "step": 2790 }, { "epoch": 3.19, "grad_norm": 1.3096553096237091, "learning_rate": 0.00018373440313307834, "loss": 4.0351, "step": 2791 }, { "epoch": 3.19, "grad_norm": 2.714336817661011, "learning_rate": 0.00018352966925921314, "loss": 4.2838, "step": 2792 }, { "epoch": 3.19, "grad_norm": 2.124072644121655, "learning_rate": 0.00018332499923653416, "loss": 4.2268, "step": 2793 }, { "epoch": 3.19, "grad_norm": 1.8543296956502366, "learning_rate": 0.00018312039317724534, "loss": 4.2252, "step": 2794 }, { "epoch": 3.19, "grad_norm": 0.8846544033255543, "learning_rate": 0.0001829158511935156, "loss": 4.4064, "step": 2795 }, { "epoch": 3.2, "grad_norm": 1.5569247935975998, "learning_rate": 0.00018271137339747858, "loss": 4.1093, "step": 2796 }, { "epoch": 3.2, "grad_norm": 1.2665889513708246, "learning_rate": 0.00018250695990123297, "loss": 4.2194, "step": 2797 }, { "epoch": 3.2, "grad_norm": 0.8271480060841104, "learning_rate": 0.00018230261081684168, "loss": 4.2065, "step": 2798 }, { "epoch": 3.2, "grad_norm": 1.9493994903903273, "learning_rate": 0.00018209832625633285, "loss": 4.1319, "step": 2799 }, { "epoch": 3.2, "grad_norm": 0.9277148937095753, "learning_rate": 0.00018189410633169908, "loss": 4.2486, "step": 2800 }, { "epoch": 3.2, "eval_blimp_filtered_avg": 0.5273134328358209, "eval_blimp_filtered_std": 0.005565775425191919, "step": 2800 }, { "epoch": 3.2, "eval_blimp_supplement_avg": 0.5301724137931034, "eval_blimp_supplement_std": 0.023129847773216545, "step": 2800 }, { "epoch": 3.2, "eval_vqa_filtered_avg": 0.38, "eval_vqa_filtered_std": 0.048783173121456316, "step": 2800 }, { "epoch": 3.2, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 2800 }, { "epoch": 3.2, "grad_norm": 1.630683049379475, "learning_rate": 0.00018168995115489745, "loss": 4.3305, "step": 2801 }, { "epoch": 3.2, "grad_norm": 0.9216934625829307, "learning_rate": 0.00018148586083784955, "loss": 4.1906, "step": 2802 }, { "epoch": 3.2, "grad_norm": 1.173095770955462, "learning_rate": 0.00018128183549244155, "loss": 4.1595, "step": 2803 }, { "epoch": 3.2, "grad_norm": 1.475995638245296, "learning_rate": 0.00018107787523052385, "loss": 4.4443, "step": 2804 }, { "epoch": 3.21, "grad_norm": 1.3687966744577391, "learning_rate": 0.0001808739801639113, "loss": 4.0034, "step": 2805 }, { "epoch": 3.21, "grad_norm": 2.797063435389921, "learning_rate": 0.0001806701504043829, "loss": 4.148, "step": 2806 }, { "epoch": 3.21, "grad_norm": 1.3105120131032841, "learning_rate": 0.0001804663860636818, "loss": 4.3193, "step": 2807 }, { "epoch": 3.21, "grad_norm": 1.45735139256422, "learning_rate": 0.00018026268725351548, "loss": 3.9888, "step": 2808 }, { "epoch": 3.21, 
"grad_norm": 3.0334698428277767, "learning_rate": 0.0001800590540855555, "loss": 4.0786, "step": 2809 }, { "epoch": 3.21, "grad_norm": 1.2674645044954693, "learning_rate": 0.00017985548667143707, "loss": 4.1715, "step": 2810 }, { "epoch": 3.21, "grad_norm": 2.042616524649647, "learning_rate": 0.00017965198512275975, "loss": 4.1544, "step": 2811 }, { "epoch": 3.21, "grad_norm": 1.3436950328277817, "learning_rate": 0.00017944854955108684, "loss": 4.1261, "step": 2812 }, { "epoch": 3.21, "grad_norm": 1.8908413602736949, "learning_rate": 0.0001792451800679454, "loss": 4.4054, "step": 2813 }, { "epoch": 3.22, "grad_norm": 1.1175925899970909, "learning_rate": 0.00017904187678482653, "loss": 4.1414, "step": 2814 }, { "epoch": 3.22, "grad_norm": 3.7512680533101066, "learning_rate": 0.00017883863981318462, "loss": 4.1348, "step": 2815 }, { "epoch": 3.22, "grad_norm": 1.003180208125442, "learning_rate": 0.00017863546926443803, "loss": 4.1727, "step": 2816 }, { "epoch": 3.22, "grad_norm": 1.7800670935374088, "learning_rate": 0.0001784323652499686, "loss": 4.1945, "step": 2817 }, { "epoch": 3.22, "grad_norm": 2.123772761726997, "learning_rate": 0.0001782293278811218, "loss": 4.1553, "step": 2818 }, { "epoch": 3.22, "grad_norm": 0.9025643186003013, "learning_rate": 0.0001780263572692063, "loss": 4.046, "step": 2819 }, { "epoch": 3.22, "grad_norm": 1.767623134027193, "learning_rate": 0.0001778234535254944, "loss": 4.3584, "step": 2820 }, { "epoch": 3.22, "grad_norm": 1.742710690728317, "learning_rate": 0.00017762061676122168, "loss": 4.2054, "step": 2821 }, { "epoch": 3.23, "grad_norm": 2.236017233257212, "learning_rate": 0.00017741784708758709, "loss": 4.1117, "step": 2822 }, { "epoch": 3.23, "grad_norm": 1.5003693279750003, "learning_rate": 0.00017721514461575252, "loss": 3.9894, "step": 2823 }, { "epoch": 3.23, "grad_norm": 1.9296100255091893, "learning_rate": 0.00017701250945684338, "loss": 4.1613, "step": 2824 }, { "epoch": 3.23, "grad_norm": 4.9122113548548185, "learning_rate": 0.00017680994172194792, "loss": 4.3209, "step": 2825 }, { "epoch": 3.23, "grad_norm": 1.2134152007691492, "learning_rate": 0.00017660744152211757, "loss": 4.2426, "step": 2826 }, { "epoch": 3.23, "grad_norm": 2.178043480665856, "learning_rate": 0.00017640500896836663, "loss": 3.8948, "step": 2827 }, { "epoch": 3.23, "grad_norm": 2.3159068914153194, "learning_rate": 0.00017620264417167233, "loss": 4.1773, "step": 2828 }, { "epoch": 3.23, "grad_norm": 4.269184055570103, "learning_rate": 0.0001760003472429749, "loss": 4.4059, "step": 2829 }, { "epoch": 3.23, "grad_norm": 1.3435879904144112, "learning_rate": 0.00017579811829317721, "loss": 4.2174, "step": 2830 }, { "epoch": 3.24, "grad_norm": 2.0081736077872283, "learning_rate": 0.00017559595743314483, "loss": 4.2244, "step": 2831 }, { "epoch": 3.24, "grad_norm": 2.735090810337554, "learning_rate": 0.00017539386477370618, "loss": 4.0842, "step": 2832 }, { "epoch": 3.24, "grad_norm": 3.5569650378515165, "learning_rate": 0.00017519184042565203, "loss": 4.2274, "step": 2833 }, { "epoch": 3.24, "grad_norm": 1.8216625663317314, "learning_rate": 0.00017498988449973614, "loss": 4.0313, "step": 2834 }, { "epoch": 3.24, "grad_norm": 1.271137937095734, "learning_rate": 0.0001747879971066742, "loss": 4.3292, "step": 2835 }, { "epoch": 3.24, "grad_norm": 3.214508185974161, "learning_rate": 0.00017458617835714477, "loss": 4.051, "step": 2836 }, { "epoch": 3.24, "grad_norm": 2.937214686056647, "learning_rate": 0.0001743844283617885, "loss": 4.2712, "step": 2837 }, { "epoch": 3.24, 
"grad_norm": 1.5105531541861679, "learning_rate": 0.00017418274723120857, "loss": 3.9988, "step": 2838 }, { "epoch": 3.24, "grad_norm": 2.0023142527305517, "learning_rate": 0.00017398113507597035, "loss": 4.2166, "step": 2839 }, { "epoch": 3.25, "grad_norm": 2.4779953064375047, "learning_rate": 0.00017377959200660118, "loss": 3.9077, "step": 2840 }, { "epoch": 3.25, "grad_norm": 1.317787121310388, "learning_rate": 0.00017357811813359074, "loss": 4.214, "step": 2841 }, { "epoch": 3.25, "grad_norm": 1.9256972091848976, "learning_rate": 0.00017337671356739076, "loss": 4.1372, "step": 2842 }, { "epoch": 3.25, "grad_norm": 1.4817489697778077, "learning_rate": 0.00017317537841841492, "loss": 4.1376, "step": 2843 }, { "epoch": 3.25, "grad_norm": 1.2855792677187068, "learning_rate": 0.00017297411279703888, "loss": 3.9826, "step": 2844 }, { "epoch": 3.25, "grad_norm": 2.318716505291194, "learning_rate": 0.00017277291681360018, "loss": 4.0206, "step": 2845 }, { "epoch": 3.25, "grad_norm": 2.642236361428631, "learning_rate": 0.0001725717905783982, "loss": 4.2664, "step": 2846 }, { "epoch": 3.25, "grad_norm": 2.47296432818547, "learning_rate": 0.00017237073420169416, "loss": 4.1272, "step": 2847 }, { "epoch": 3.25, "grad_norm": 3.01850789171758, "learning_rate": 0.00017216974779371056, "loss": 4.2043, "step": 2848 }, { "epoch": 3.26, "grad_norm": 1.1187419125300315, "learning_rate": 0.0001719688314646321, "loss": 4.2573, "step": 2849 }, { "epoch": 3.26, "grad_norm": 1.0302262446464754, "learning_rate": 0.00017176798532460473, "loss": 4.1824, "step": 2850 }, { "epoch": 3.26, "grad_norm": 2.0804115302692514, "learning_rate": 0.00017156720948373593, "loss": 4.2497, "step": 2851 }, { "epoch": 3.26, "grad_norm": 2.4948952132446385, "learning_rate": 0.00017136650405209515, "loss": 4.2766, "step": 2852 }, { "epoch": 3.26, "grad_norm": 1.6536561659707587, "learning_rate": 0.00017116586913971235, "loss": 3.9676, "step": 2853 }, { "epoch": 3.26, "grad_norm": 2.476760113222643, "learning_rate": 0.00017096530485657943, "loss": 4.1596, "step": 2854 }, { "epoch": 3.26, "grad_norm": 7.433123354072842, "learning_rate": 0.00017076481131264947, "loss": 4.0824, "step": 2855 }, { "epoch": 3.26, "grad_norm": 1.7065925266498387, "learning_rate": 0.00017056438861783686, "loss": 4.1428, "step": 2856 }, { "epoch": 3.27, "grad_norm": 1.6243008569365676, "learning_rate": 0.00017036403688201678, "loss": 4.093, "step": 2857 }, { "epoch": 3.27, "grad_norm": 3.1863518320588144, "learning_rate": 0.00017016375621502582, "loss": 4.2422, "step": 2858 }, { "epoch": 3.27, "grad_norm": 3.8071043921466723, "learning_rate": 0.00016996354672666147, "loss": 4.3557, "step": 2859 }, { "epoch": 3.27, "grad_norm": 2.0617686646828197, "learning_rate": 0.0001697634085266826, "loss": 4.087, "step": 2860 }, { "epoch": 3.27, "grad_norm": 9.1727244499222, "learning_rate": 0.00016956334172480838, "loss": 4.3879, "step": 2861 }, { "epoch": 3.27, "grad_norm": 5.261908166753962, "learning_rate": 0.0001693633464307191, "loss": 4.9178, "step": 2862 }, { "epoch": 3.27, "grad_norm": 6.060845270750296, "learning_rate": 0.00016916342275405605, "loss": 4.9404, "step": 2863 }, { "epoch": 3.27, "grad_norm": 2.271478110637955, "learning_rate": 0.00016896357080442104, "loss": 4.9488, "step": 2864 }, { "epoch": 3.27, "grad_norm": 2.0197099285268916, "learning_rate": 0.00016876379069137652, "loss": 4.7329, "step": 2865 }, { "epoch": 3.28, "grad_norm": 1.872812430044207, "learning_rate": 0.00016856408252444548, "loss": 4.5932, "step": 2866 }, { "epoch": 3.28, 
"grad_norm": 4.116240753372435, "learning_rate": 0.00016836444641311194, "loss": 4.63, "step": 2867 }, { "epoch": 3.28, "grad_norm": 1.9219396269261015, "learning_rate": 0.00016816488246682002, "loss": 4.5567, "step": 2868 }, { "epoch": 3.28, "grad_norm": 3.5831235722854804, "learning_rate": 0.0001679653907949744, "loss": 4.726, "step": 2869 }, { "epoch": 3.28, "grad_norm": 2.940435912801413, "learning_rate": 0.00016776597150693995, "loss": 4.4976, "step": 2870 }, { "epoch": 3.28, "grad_norm": 1.8163356851602066, "learning_rate": 0.00016756662471204204, "loss": 4.3669, "step": 2871 }, { "epoch": 3.28, "grad_norm": 1.40440816874465, "learning_rate": 0.00016736735051956633, "loss": 4.454, "step": 2872 }, { "epoch": 3.28, "grad_norm": 2.216907232220189, "learning_rate": 0.00016716814903875856, "loss": 4.3039, "step": 2873 }, { "epoch": 3.28, "grad_norm": 2.98365223977936, "learning_rate": 0.00016696902037882468, "loss": 4.4578, "step": 2874 }, { "epoch": 3.29, "grad_norm": 9.014507515227566, "learning_rate": 0.00016676996464893074, "loss": 4.5878, "step": 2875 }, { "epoch": 3.29, "grad_norm": 5.043157847690633, "learning_rate": 0.0001665709819582027, "loss": 4.4125, "step": 2876 }, { "epoch": 3.29, "grad_norm": 1.5068394288891527, "learning_rate": 0.0001663720724157267, "loss": 4.4419, "step": 2877 }, { "epoch": 3.29, "grad_norm": 3.8361428654282195, "learning_rate": 0.0001661732361305484, "loss": 4.4068, "step": 2878 }, { "epoch": 3.29, "grad_norm": 3.872461133171505, "learning_rate": 0.00016597447321167364, "loss": 4.5011, "step": 2879 }, { "epoch": 3.29, "grad_norm": 2.6401463943459262, "learning_rate": 0.00016577578376806787, "loss": 4.3456, "step": 2880 }, { "epoch": 3.29, "grad_norm": 1.640577001122683, "learning_rate": 0.00016557716790865636, "loss": 4.2418, "step": 2881 }, { "epoch": 3.29, "grad_norm": 8.599084439651879, "learning_rate": 0.00016537862574232397, "loss": 4.3751, "step": 2882 }, { "epoch": 3.29, "grad_norm": 3.5581856191368715, "learning_rate": 0.0001651801573779152, "loss": 4.4489, "step": 2883 }, { "epoch": 3.3, "grad_norm": 4.1828779171024015, "learning_rate": 0.00016498176292423412, "loss": 4.5774, "step": 2884 }, { "epoch": 3.3, "grad_norm": 3.1045328923276987, "learning_rate": 0.00016478344249004412, "loss": 4.5375, "step": 2885 }, { "epoch": 3.3, "grad_norm": 2.754031983637328, "learning_rate": 0.00016458519618406834, "loss": 4.597, "step": 2886 }, { "epoch": 3.3, "grad_norm": 1.7748134900925425, "learning_rate": 0.00016438702411498878, "loss": 4.3639, "step": 2887 }, { "epoch": 3.3, "grad_norm": 1.849536286646378, "learning_rate": 0.00016418892639144715, "loss": 4.5251, "step": 2888 }, { "epoch": 3.3, "grad_norm": 2.542259761762008, "learning_rate": 0.00016399090312204433, "loss": 4.609, "step": 2889 }, { "epoch": 3.3, "grad_norm": 1.9888744187862533, "learning_rate": 0.0001637929544153403, "loss": 4.3906, "step": 2890 }, { "epoch": 3.3, "grad_norm": 3.2179599384776476, "learning_rate": 0.00016359508037985416, "loss": 4.3373, "step": 2891 }, { "epoch": 3.31, "grad_norm": 2.770506910609272, "learning_rate": 0.00016339728112406415, "loss": 4.5812, "step": 2892 }, { "epoch": 3.31, "grad_norm": 2.788267997463789, "learning_rate": 0.00016319955675640746, "loss": 4.4812, "step": 2893 }, { "epoch": 3.31, "grad_norm": 1.7771413862165066, "learning_rate": 0.0001630019073852804, "loss": 4.2795, "step": 2894 }, { "epoch": 3.31, "grad_norm": 3.9014400560439415, "learning_rate": 0.00016280433311903768, "loss": 4.5254, "step": 2895 }, { "epoch": 3.31, "grad_norm": 
2.303044382020593, "learning_rate": 0.00016260683406599336, "loss": 4.2817, "step": 2896 }, { "epoch": 3.31, "grad_norm": 1.972416149284079, "learning_rate": 0.00016240941033442006, "loss": 4.1829, "step": 2897 }, { "epoch": 3.31, "grad_norm": 3.4124325812236163, "learning_rate": 0.00016221206203254909, "loss": 4.2986, "step": 2898 }, { "epoch": 3.31, "grad_norm": 1.6106578382569272, "learning_rate": 0.00016201478926857046, "loss": 4.2009, "step": 2899 }, { "epoch": 3.31, "grad_norm": 1.4507975904090147, "learning_rate": 0.00016181759215063273, "loss": 4.287, "step": 2900 }, { "epoch": 3.31, "eval_blimp_filtered_avg": 0.5256716417910448, "eval_blimp_filtered_std": 0.005630789097487669, "step": 2900 }, { "epoch": 3.31, "eval_blimp_supplement_avg": 0.5172413793103449, "eval_blimp_supplement_std": 0.02271711993467397, "step": 2900 }, { "epoch": 3.31, "eval_vqa_filtered_avg": 0.38, "eval_vqa_filtered_std": 0.048783173121456316, "step": 2900 }, { "epoch": 3.31, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 2900 }, { "epoch": 3.32, "grad_norm": 2.9598082300237327, "learning_rate": 0.00016162047078684296, "loss": 4.146, "step": 2901 }, { "epoch": 3.32, "grad_norm": 2.307619242709991, "learning_rate": 0.00016142342528526675, "loss": 4.4024, "step": 2902 }, { "epoch": 3.32, "grad_norm": 1.1504692737669255, "learning_rate": 0.00016122645575392824, "loss": 4.15, "step": 2903 }, { "epoch": 3.32, "grad_norm": 1.394519406337347, "learning_rate": 0.00016102956230080954, "loss": 3.9863, "step": 2904 }, { "epoch": 3.32, "grad_norm": 1.554043638712319, "learning_rate": 0.00016083274503385128, "loss": 4.2885, "step": 2905 }, { "epoch": 3.32, "grad_norm": 3.105550375785096, "learning_rate": 0.00016063600406095234, "loss": 4.233, "step": 2906 }, { "epoch": 3.32, "grad_norm": 1.6580135872056228, "learning_rate": 0.00016043933948997, "loss": 4.1895, "step": 2907 }, { "epoch": 3.32, "grad_norm": 2.1140479815888535, "learning_rate": 0.0001602427514287191, "loss": 4.3222, "step": 2908 }, { "epoch": 3.32, "grad_norm": 2.4932858413247754, "learning_rate": 0.000160046239984973, "loss": 4.237, "step": 2909 }, { "epoch": 3.33, "grad_norm": 1.9660725508142398, "learning_rate": 0.00015984980526646288, "loss": 4.2804, "step": 2910 }, { "epoch": 3.33, "grad_norm": 2.5568933880403906, "learning_rate": 0.00015965344738087795, "loss": 4.0959, "step": 2911 }, { "epoch": 3.33, "grad_norm": 10.2083464851958, "learning_rate": 0.00015945716643586504, "loss": 4.1955, "step": 2912 }, { "epoch": 3.33, "grad_norm": 3.150228399424194, "learning_rate": 0.00015926096253902897, "loss": 4.3907, "step": 2913 }, { "epoch": 3.33, "grad_norm": 5.30523949270125, "learning_rate": 0.00015906483579793262, "loss": 4.4763, "step": 2914 }, { "epoch": 3.33, "grad_norm": 3.627813877196551, "learning_rate": 0.00015886878632009622, "loss": 4.4606, "step": 2915 }, { "epoch": 3.33, "grad_norm": 2.711163356296052, "learning_rate": 0.00015867281421299754, "loss": 4.3531, "step": 2916 }, { "epoch": 3.33, "grad_norm": 2.1567278220400987, "learning_rate": 0.00015847691958407223, "loss": 4.2971, "step": 2917 }, { "epoch": 3.33, "grad_norm": 3.3714256259490907, "learning_rate": 0.00015828110254071328, "loss": 4.1778, "step": 2918 }, { "epoch": 3.34, "grad_norm": 3.1382605079898656, "learning_rate": 0.00015808536319027134, "loss": 4.2477, "step": 2919 }, { "epoch": 3.34, "grad_norm": 2.2571459281214685, "learning_rate": 0.00015788970164005403, "loss": 4.1207, "step": 2920 }, { "epoch": 3.34, "grad_norm": 
2.7798026822841564, "learning_rate": 0.00015769411799732697, "loss": 4.2812, "step": 2921 }, { "epoch": 3.34, "grad_norm": 4.758955793179381, "learning_rate": 0.0001574986123693126, "loss": 4.2582, "step": 2922 }, { "epoch": 3.34, "grad_norm": 2.6956204142506626, "learning_rate": 0.0001573031848631907, "loss": 4.3179, "step": 2923 }, { "epoch": 3.34, "grad_norm": 3.8310735404436747, "learning_rate": 0.00015710783558609833, "loss": 4.2581, "step": 2924 }, { "epoch": 3.34, "grad_norm": 3.3037076939664254, "learning_rate": 0.0001569125646451294, "loss": 4.188, "step": 2925 }, { "epoch": 3.34, "grad_norm": 8.273292743295352, "learning_rate": 0.0001567173721473351, "loss": 4.3601, "step": 2926 }, { "epoch": 3.35, "grad_norm": 1.99025392767604, "learning_rate": 0.00015652225819972365, "loss": 4.1211, "step": 2927 }, { "epoch": 3.35, "grad_norm": 2.208580300716964, "learning_rate": 0.00015632722290926006, "loss": 4.1798, "step": 2928 }, { "epoch": 3.35, "grad_norm": 3.3852444702939843, "learning_rate": 0.00015613226638286625, "loss": 4.5403, "step": 2929 }, { "epoch": 3.35, "grad_norm": 3.3386385704084036, "learning_rate": 0.00015593738872742103, "loss": 4.4593, "step": 2930 }, { "epoch": 3.35, "grad_norm": 2.448766082647211, "learning_rate": 0.00015574259004975995, "loss": 4.2114, "step": 2931 }, { "epoch": 3.35, "grad_norm": 18.315394578160113, "learning_rate": 0.0001555478704566753, "loss": 4.2419, "step": 2932 }, { "epoch": 3.35, "grad_norm": 1.86409821675218, "learning_rate": 0.0001553532300549158, "loss": 4.2527, "step": 2933 }, { "epoch": 3.35, "grad_norm": 8.458458282283749, "learning_rate": 0.00015515866895118692, "loss": 4.4318, "step": 2934 }, { "epoch": 3.35, "grad_norm": 4.521578618935152, "learning_rate": 0.00015496418725215077, "loss": 4.6523, "step": 2935 }, { "epoch": 3.36, "grad_norm": 3.584620714036975, "learning_rate": 0.00015476978506442573, "loss": 5.0449, "step": 2936 }, { "epoch": 3.36, "grad_norm": 5.997341126347501, "learning_rate": 0.0001545754624945867, "loss": 5.342, "step": 2937 }, { "epoch": 3.36, "grad_norm": 4.858225399009792, "learning_rate": 0.0001543812196491649, "loss": 5.2245, "step": 2938 }, { "epoch": 3.36, "grad_norm": 2.8337829197990927, "learning_rate": 0.00015418705663464784, "loss": 4.7901, "step": 2939 }, { "epoch": 3.36, "grad_norm": 3.274873438435907, "learning_rate": 0.00015399297355747925, "loss": 4.603, "step": 2940 }, { "epoch": 3.36, "grad_norm": 2.1292859099597155, "learning_rate": 0.00015379897052405925, "loss": 4.4665, "step": 2941 }, { "epoch": 3.36, "grad_norm": 1.5918541976301666, "learning_rate": 0.00015360504764074358, "loss": 4.2902, "step": 2942 }, { "epoch": 3.36, "grad_norm": 2.8409557328666515, "learning_rate": 0.00015341120501384444, "loss": 4.6114, "step": 2943 }, { "epoch": 3.36, "grad_norm": 3.744619664073681, "learning_rate": 0.00015321744274963002, "loss": 4.4554, "step": 2944 }, { "epoch": 3.37, "grad_norm": 3.343654972012537, "learning_rate": 0.00015302376095432428, "loss": 4.6114, "step": 2945 }, { "epoch": 3.37, "grad_norm": 10.898752165364591, "learning_rate": 0.00015283015973410727, "loss": 4.736, "step": 2946 }, { "epoch": 3.37, "grad_norm": 1.7686477370442448, "learning_rate": 0.00015263663919511466, "loss": 4.388, "step": 2947 }, { "epoch": 3.37, "grad_norm": 2.089048244529463, "learning_rate": 0.00015244319944343805, "loss": 4.5622, "step": 2948 }, { "epoch": 3.37, "grad_norm": 2.835059420065904, "learning_rate": 0.0001522498405851247, "loss": 4.3033, "step": 2949 }, { "epoch": 3.37, "grad_norm": 4.149785371751094, 
"learning_rate": 0.0001520565627261774, "loss": 4.6362, "step": 2950 }, { "epoch": 3.37, "grad_norm": 2.140474037529553, "learning_rate": 0.00015186336597255466, "loss": 4.576, "step": 2951 }, { "epoch": 3.37, "grad_norm": 1.713768363907163, "learning_rate": 0.0001516702504301706, "loss": 4.5662, "step": 2952 }, { "epoch": 3.37, "grad_norm": 1.493897242723164, "learning_rate": 0.00015147721620489463, "loss": 4.5068, "step": 2953 }, { "epoch": 3.38, "grad_norm": 1.574675355602964, "learning_rate": 0.00015128426340255173, "loss": 4.41, "step": 2954 }, { "epoch": 3.38, "grad_norm": 2.1609263911570866, "learning_rate": 0.0001510913921289222, "loss": 4.301, "step": 2955 }, { "epoch": 3.38, "grad_norm": 1.6909327197188215, "learning_rate": 0.00015089860248974156, "loss": 4.2753, "step": 2956 }, { "epoch": 3.38, "grad_norm": 1.7276914340539895, "learning_rate": 0.00015070589459070078, "loss": 4.3441, "step": 2957 }, { "epoch": 3.38, "grad_norm": 1.311909407953645, "learning_rate": 0.0001505132685374459, "loss": 4.4631, "step": 2958 }, { "epoch": 3.38, "grad_norm": 2.4276800759224995, "learning_rate": 0.0001503207244355779, "loss": 4.271, "step": 2959 }, { "epoch": 3.38, "grad_norm": 1.8315908800579928, "learning_rate": 0.00015012826239065296, "loss": 4.3313, "step": 2960 }, { "epoch": 3.38, "grad_norm": 1.4606940995719733, "learning_rate": 0.0001499358825081826, "loss": 4.2892, "step": 2961 }, { "epoch": 3.39, "grad_norm": 2.5579467010319803, "learning_rate": 0.00014974358489363302, "loss": 4.4045, "step": 2962 }, { "epoch": 3.39, "grad_norm": 1.85642265753712, "learning_rate": 0.00014955136965242512, "loss": 4.2922, "step": 2963 }, { "epoch": 3.39, "grad_norm": 0.9675815192792815, "learning_rate": 0.0001493592368899349, "loss": 4.3169, "step": 2964 }, { "epoch": 3.39, "grad_norm": 1.6453911761982585, "learning_rate": 0.0001491671867114931, "loss": 4.3375, "step": 2965 }, { "epoch": 3.39, "grad_norm": 1.3985374264470756, "learning_rate": 0.0001489752192223853, "loss": 4.3316, "step": 2966 }, { "epoch": 3.39, "grad_norm": 0.8361200779531592, "learning_rate": 0.00014878333452785132, "loss": 4.4045, "step": 2967 }, { "epoch": 3.39, "grad_norm": 1.6830535150135522, "learning_rate": 0.00014859153273308622, "loss": 4.3132, "step": 2968 }, { "epoch": 3.39, "grad_norm": 2.627165650528088, "learning_rate": 0.00014839981394323913, "loss": 4.1185, "step": 2969 }, { "epoch": 3.39, "grad_norm": 0.7901149211557768, "learning_rate": 0.00014820817826341397, "loss": 4.0683, "step": 2970 }, { "epoch": 3.4, "grad_norm": 1.2623048840601314, "learning_rate": 0.00014801662579866872, "loss": 4.3559, "step": 2971 }, { "epoch": 3.4, "grad_norm": 5.381616290351572, "learning_rate": 0.00014782515665401612, "loss": 4.471, "step": 2972 }, { "epoch": 3.4, "grad_norm": 0.8693086404483586, "learning_rate": 0.00014763377093442306, "loss": 4.2935, "step": 2973 }, { "epoch": 3.4, "grad_norm": 1.3339542615824342, "learning_rate": 0.00014744246874481078, "loss": 4.3158, "step": 2974 }, { "epoch": 3.4, "grad_norm": 1.5561431322745403, "learning_rate": 0.0001472512501900546, "loss": 4.3639, "step": 2975 }, { "epoch": 3.4, "grad_norm": 4.125034249480485, "learning_rate": 0.0001470601153749841, "loss": 4.2365, "step": 2976 }, { "epoch": 3.4, "grad_norm": 1.0603037867648728, "learning_rate": 0.00014686906440438294, "loss": 3.9552, "step": 2977 }, { "epoch": 3.4, "grad_norm": 2.7302665605442855, "learning_rate": 0.00014667809738298874, "loss": 4.3139, "step": 2978 }, { "epoch": 3.4, "grad_norm": 2.891402776067834, "learning_rate": 
0.0001464872144154933, "loss": 4.3518, "step": 2979 }, { "epoch": 3.41, "grad_norm": 0.9166486193528715, "learning_rate": 0.00014629641560654196, "loss": 4.2925, "step": 2980 }, { "epoch": 3.41, "grad_norm": 1.0573575381822362, "learning_rate": 0.0001461057010607342, "loss": 4.2999, "step": 2981 }, { "epoch": 3.41, "grad_norm": 3.1213229024465416, "learning_rate": 0.00014591507088262335, "loss": 4.4475, "step": 2982 }, { "epoch": 3.41, "grad_norm": 1.20700919827106, "learning_rate": 0.00014572452517671634, "loss": 4.0381, "step": 2983 }, { "epoch": 3.41, "grad_norm": 3.185279291648942, "learning_rate": 0.00014553406404747387, "loss": 4.1514, "step": 2984 }, { "epoch": 3.41, "grad_norm": 1.106280758692437, "learning_rate": 0.0001453436875993102, "loss": 4.2481, "step": 2985 }, { "epoch": 3.41, "grad_norm": 1.9784269703891317, "learning_rate": 0.0001451533959365933, "loss": 4.2724, "step": 2986 }, { "epoch": 3.41, "grad_norm": 2.3426580021491925, "learning_rate": 0.00014496318916364462, "loss": 4.2256, "step": 2987 }, { "epoch": 3.41, "grad_norm": 2.306280176719563, "learning_rate": 0.00014477306738473877, "loss": 4.2732, "step": 2988 }, { "epoch": 3.42, "grad_norm": 2.1196994893100745, "learning_rate": 0.00014458303070410424, "loss": 4.262, "step": 2989 }, { "epoch": 3.42, "grad_norm": 1.606187711912442, "learning_rate": 0.00014439307922592255, "loss": 4.3625, "step": 2990 }, { "epoch": 3.42, "grad_norm": 2.1791598765739124, "learning_rate": 0.00014420321305432868, "loss": 4.3896, "step": 2991 }, { "epoch": 3.42, "grad_norm": 3.29815582498659, "learning_rate": 0.0001440134322934107, "loss": 4.1172, "step": 2992 }, { "epoch": 3.42, "grad_norm": 1.8883013305514043, "learning_rate": 0.00014382373704721003, "loss": 4.2294, "step": 2993 }, { "epoch": 3.42, "grad_norm": 1.3866533030398822, "learning_rate": 0.000143634127419721, "loss": 4.2159, "step": 2994 }, { "epoch": 3.42, "grad_norm": 2.1449402200607484, "learning_rate": 0.00014344460351489114, "loss": 4.4003, "step": 2995 }, { "epoch": 3.42, "grad_norm": 2.065395906421541, "learning_rate": 0.0001432551654366211, "loss": 4.1781, "step": 2996 }, { "epoch": 3.43, "grad_norm": 1.5146049691480032, "learning_rate": 0.00014306581328876408, "loss": 4.0698, "step": 2997 }, { "epoch": 3.43, "grad_norm": 1.975996406718577, "learning_rate": 0.00014287654717512656, "loss": 4.2594, "step": 2998 }, { "epoch": 3.43, "grad_norm": 3.5128526951227372, "learning_rate": 0.00014268736719946757, "loss": 4.3284, "step": 2999 }, { "epoch": 3.43, "grad_norm": 1.0798124625667997, "learning_rate": 0.00014249827346549945, "loss": 4.3498, "step": 3000 }, { "epoch": 3.43, "eval_blimp_filtered_avg": 0.5197014925373135, "eval_blimp_filtered_std": 0.005688596935991008, "step": 3000 }, { "epoch": 3.43, "eval_blimp_supplement_avg": 0.4870689655172414, "eval_blimp_supplement_std": 0.02269928606166752, "step": 3000 }, { "epoch": 3.43, "eval_vqa_filtered_avg": 0.34, "eval_vqa_filtered_std": 0.047609522856952365, "step": 3000 }, { "epoch": 3.43, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956911, "step": 3000 }, { "epoch": 3.43, "grad_norm": 1.6436427330455934, "learning_rate": 0.0001423092660768865, "loss": 4.231, "step": 3001 }, { "epoch": 3.43, "grad_norm": 2.3511960811735206, "learning_rate": 0.0001421203451372462, "loss": 4.2542, "step": 3002 }, { "epoch": 3.43, "grad_norm": 4.66370572454176, "learning_rate": 0.00014193151075014848, "loss": 4.3803, "step": 3003 }, { "epoch": 3.43, "grad_norm": 1.4421822848651475, "learning_rate": 
0.000141742763019116, "loss": 4.2102, "step": 3004 }, { "epoch": 3.43, "grad_norm": 1.8080879933134022, "learning_rate": 0.00014155410204762342, "loss": 4.2323, "step": 3005 }, { "epoch": 3.44, "grad_norm": 1.7669487860711126, "learning_rate": 0.0001413655279390984, "loss": 4.3904, "step": 3006 }, { "epoch": 3.44, "grad_norm": 3.0361315737654704, "learning_rate": 0.00014117704079692054, "loss": 4.4187, "step": 3007 }, { "epoch": 3.44, "grad_norm": 4.701207336255201, "learning_rate": 0.00014098864072442242, "loss": 4.242, "step": 3008 }, { "epoch": 3.44, "grad_norm": 2.2495166091713634, "learning_rate": 0.00014080032782488798, "loss": 4.275, "step": 3009 }, { "epoch": 3.44, "grad_norm": 1.5044501990168426, "learning_rate": 0.00014061210220155402, "loss": 4.3108, "step": 3010 }, { "epoch": 3.44, "grad_norm": 1.0523883606747981, "learning_rate": 0.00014042396395760927, "loss": 4.1203, "step": 3011 }, { "epoch": 3.44, "grad_norm": 1.4149422150403703, "learning_rate": 0.00014023591319619458, "loss": 4.2414, "step": 3012 }, { "epoch": 3.44, "grad_norm": 3.024893102883082, "learning_rate": 0.000140047950020403, "loss": 4.2553, "step": 3013 }, { "epoch": 3.44, "grad_norm": 2.0032707195912827, "learning_rate": 0.00013986007453327908, "loss": 4.2786, "step": 3014 }, { "epoch": 3.45, "grad_norm": 2.138933631785984, "learning_rate": 0.00013967228683781994, "loss": 4.2051, "step": 3015 }, { "epoch": 3.45, "grad_norm": 2.1864625768375885, "learning_rate": 0.00013948458703697415, "loss": 4.2906, "step": 3016 }, { "epoch": 3.45, "grad_norm": 4.4129812990416255, "learning_rate": 0.00013929697523364234, "loss": 4.0548, "step": 3017 }, { "epoch": 3.45, "grad_norm": 1.190366441428171, "learning_rate": 0.00013910945153067654, "loss": 4.1696, "step": 3018 }, { "epoch": 3.45, "grad_norm": 4.144525146551349, "learning_rate": 0.0001389220160308808, "loss": 4.4661, "step": 3019 }, { "epoch": 3.45, "grad_norm": 3.718542328929559, "learning_rate": 0.0001387346688370107, "loss": 4.4426, "step": 3020 }, { "epoch": 3.45, "grad_norm": 5.630238420384331, "learning_rate": 0.0001385474100517736, "loss": 4.362, "step": 3021 }, { "epoch": 3.45, "grad_norm": 3.0696245153162534, "learning_rate": 0.00013836023977782785, "loss": 4.3613, "step": 3022 }, { "epoch": 3.45, "grad_norm": 3.096382582601289, "learning_rate": 0.00013817315811778404, "loss": 4.2421, "step": 3023 }, { "epoch": 3.46, "grad_norm": 2.6621883514489473, "learning_rate": 0.00013798616517420362, "loss": 4.3431, "step": 3024 }, { "epoch": 3.46, "grad_norm": 3.5680718762937573, "learning_rate": 0.0001377992610495997, "loss": 4.2494, "step": 3025 }, { "epoch": 3.46, "grad_norm": 4.230844347163315, "learning_rate": 0.00013761244584643636, "loss": 4.3728, "step": 3026 }, { "epoch": 3.46, "grad_norm": 2.2602157284183937, "learning_rate": 0.00013742571966712927, "loss": 4.3519, "step": 3027 }, { "epoch": 3.46, "grad_norm": 15.745097039324783, "learning_rate": 0.00013723908261404513, "loss": 4.3262, "step": 3028 }, { "epoch": 3.46, "grad_norm": 2.3267379190513386, "learning_rate": 0.00013705253478950186, "loss": 4.2876, "step": 3029 }, { "epoch": 3.46, "grad_norm": 4.41852755468411, "learning_rate": 0.00013686607629576842, "loss": 4.4346, "step": 3030 }, { "epoch": 3.46, "grad_norm": 4.2123105714209625, "learning_rate": 0.00013667970723506474, "loss": 4.5282, "step": 3031 }, { "epoch": 3.47, "grad_norm": 3.904808167090843, "learning_rate": 0.00013649342770956187, "loss": 4.3823, "step": 3032 }, { "epoch": 3.47, "grad_norm": 5.936279287109261, "learning_rate": 
0.0001363072378213816, "loss": 4.5298, "step": 3033 }, { "epoch": 3.47, "grad_norm": 2.9108129013374904, "learning_rate": 0.00013612113767259685, "loss": 4.3436, "step": 3034 }, { "epoch": 3.47, "grad_norm": 3.3831317926379496, "learning_rate": 0.0001359351273652309, "loss": 4.4461, "step": 3035 }, { "epoch": 3.47, "grad_norm": 4.18154453168728, "learning_rate": 0.00013574920700125815, "loss": 4.4323, "step": 3036 }, { "epoch": 3.47, "grad_norm": 16.5528760122221, "learning_rate": 0.00013556337668260357, "loss": 4.2725, "step": 3037 }, { "epoch": 3.47, "grad_norm": 5.79312054115292, "learning_rate": 0.0001353776365111428, "loss": 4.4751, "step": 3038 }, { "epoch": 3.47, "grad_norm": 6.047200595299549, "learning_rate": 0.00013519198658870198, "loss": 4.7292, "step": 3039 }, { "epoch": 3.47, "grad_norm": 2.9419929081228426, "learning_rate": 0.0001350064270170579, "loss": 4.5264, "step": 3040 }, { "epoch": 3.48, "grad_norm": 2.7878373552144584, "learning_rate": 0.0001348209578979377, "loss": 4.4431, "step": 3041 }, { "epoch": 3.48, "grad_norm": 8.223590376889106, "learning_rate": 0.00013463557933301909, "loss": 4.322, "step": 3042 }, { "epoch": 3.48, "grad_norm": 1.519733603102312, "learning_rate": 0.0001344502914239298, "loss": 4.3892, "step": 3043 }, { "epoch": 3.48, "grad_norm": 1.9832647442314424, "learning_rate": 0.00013426509427224816, "loss": 4.2745, "step": 3044 }, { "epoch": 3.48, "grad_norm": 1.897762371893992, "learning_rate": 0.00013407998797950274, "loss": 4.2918, "step": 3045 }, { "epoch": 3.48, "grad_norm": 1.8535216277567776, "learning_rate": 0.0001338949726471721, "loss": 4.5687, "step": 3046 }, { "epoch": 3.48, "grad_norm": 2.33197887578216, "learning_rate": 0.00013371004837668538, "loss": 4.361, "step": 3047 }, { "epoch": 3.48, "grad_norm": 1.6676976878923677, "learning_rate": 0.00013352521526942108, "loss": 4.3779, "step": 3048 }, { "epoch": 3.48, "grad_norm": 1.6722103435287188, "learning_rate": 0.0001333404734267084, "loss": 4.2572, "step": 3049 }, { "epoch": 3.49, "grad_norm": 4.393290722937419, "learning_rate": 0.00013315582294982605, "loss": 4.275, "step": 3050 }, { "epoch": 3.49, "grad_norm": 2.8532827231007953, "learning_rate": 0.00013297126394000306, "loss": 4.4644, "step": 3051 }, { "epoch": 3.49, "grad_norm": 1.871736919305201, "learning_rate": 0.0001327867964984178, "loss": 4.4496, "step": 3052 }, { "epoch": 3.49, "grad_norm": 1.4888363751711573, "learning_rate": 0.0001326024207261989, "loss": 4.2669, "step": 3053 }, { "epoch": 3.49, "grad_norm": 24.74357002132999, "learning_rate": 0.00013241813672442443, "loss": 4.3262, "step": 3054 }, { "epoch": 3.49, "grad_norm": 3.737967202176959, "learning_rate": 0.00013223394459412259, "loss": 4.2771, "step": 3055 }, { "epoch": 3.49, "grad_norm": 2.886326663166909, "learning_rate": 0.00013204984443627058, "loss": 4.2825, "step": 3056 }, { "epoch": 3.49, "grad_norm": 3.6332651417966773, "learning_rate": 0.00013186583635179565, "loss": 4.2213, "step": 3057 }, { "epoch": 3.49, "grad_norm": 1.4589196300023728, "learning_rate": 0.00013168192044157436, "loss": 4.1446, "step": 3058 }, { "epoch": 3.5, "grad_norm": 2.465725797608277, "learning_rate": 0.000131498096806433, "loss": 4.1862, "step": 3059 }, { "epoch": 3.5, "grad_norm": 1.2582594539753975, "learning_rate": 0.00013131436554714686, "loss": 4.2512, "step": 3060 }, { "epoch": 3.5, "grad_norm": 3.150010817797145, "learning_rate": 0.00013113072676444076, "loss": 4.445, "step": 3061 }, { "epoch": 3.5, "grad_norm": 3.756535486477678, "learning_rate": 0.00013094718055898908, 
"loss": 4.1179, "step": 3062 }, { "epoch": 3.5, "grad_norm": 2.101984529989566, "learning_rate": 0.0001307637270314152, "loss": 4.4073, "step": 3063 }, { "epoch": 3.5, "grad_norm": 1.8832664707958415, "learning_rate": 0.00013058036628229178, "loss": 4.2018, "step": 3064 }, { "epoch": 3.5, "grad_norm": 2.969209726838927, "learning_rate": 0.0001303970984121404, "loss": 4.4063, "step": 3065 }, { "epoch": 3.5, "grad_norm": 3.1092568640442844, "learning_rate": 0.00013021392352143194, "loss": 4.1874, "step": 3066 }, { "epoch": 3.51, "grad_norm": 1.557779816221156, "learning_rate": 0.00013003084171058632, "loss": 4.4365, "step": 3067 }, { "epoch": 3.51, "grad_norm": 1.9065960211963298, "learning_rate": 0.00012984785307997232, "loss": 4.4596, "step": 3068 }, { "epoch": 3.51, "grad_norm": 1.6412265781645923, "learning_rate": 0.00012966495772990773, "loss": 4.3305, "step": 3069 }, { "epoch": 3.51, "grad_norm": 1.9166560653950269, "learning_rate": 0.0001294821557606591, "loss": 4.1937, "step": 3070 }, { "epoch": 3.51, "grad_norm": 3.823990664818364, "learning_rate": 0.0001292994472724419, "loss": 4.1603, "step": 3071 }, { "epoch": 3.51, "grad_norm": 2.176613747211886, "learning_rate": 0.00012911683236542043, "loss": 4.2939, "step": 3072 }, { "epoch": 3.51, "grad_norm": 5.705871117814191, "learning_rate": 0.00012893431113970724, "loss": 4.2088, "step": 3073 }, { "epoch": 3.51, "grad_norm": 3.225952730177573, "learning_rate": 0.000128751883695364, "loss": 4.2714, "step": 3074 }, { "epoch": 3.51, "grad_norm": 1.8308644730945545, "learning_rate": 0.00012856955013240085, "loss": 4.2944, "step": 3075 }, { "epoch": 3.52, "grad_norm": 2.669974973757755, "learning_rate": 0.00012838731055077635, "loss": 4.3037, "step": 3076 }, { "epoch": 3.52, "grad_norm": 1.2651499432267799, "learning_rate": 0.0001282051650503976, "loss": 4.1507, "step": 3077 }, { "epoch": 3.52, "grad_norm": 1.8335588641497982, "learning_rate": 0.00012802311373112017, "loss": 4.1116, "step": 3078 }, { "epoch": 3.52, "grad_norm": 1.236075470456007, "learning_rate": 0.00012784115669274795, "loss": 4.4823, "step": 3079 }, { "epoch": 3.52, "grad_norm": 1.5548947412162477, "learning_rate": 0.00012765929403503313, "loss": 4.4347, "step": 3080 }, { "epoch": 3.52, "grad_norm": 2.994862463900862, "learning_rate": 0.00012747752585767628, "loss": 4.2839, "step": 3081 }, { "epoch": 3.52, "grad_norm": 1.2930882751661772, "learning_rate": 0.0001272958522603259, "loss": 4.341, "step": 3082 }, { "epoch": 3.52, "grad_norm": 2.0226806945517026, "learning_rate": 0.00012711427334257888, "loss": 4.2458, "step": 3083 }, { "epoch": 3.52, "grad_norm": 1.5543192466290554, "learning_rate": 0.00012693278920398015, "loss": 4.2223, "step": 3084 }, { "epoch": 3.53, "grad_norm": 1.3588071336539917, "learning_rate": 0.00012675139994402265, "loss": 4.3066, "step": 3085 }, { "epoch": 3.53, "grad_norm": 1.4047977383946104, "learning_rate": 0.00012657010566214733, "loss": 4.2968, "step": 3086 }, { "epoch": 3.53, "grad_norm": 1.3922275213392041, "learning_rate": 0.00012638890645774308, "loss": 4.4457, "step": 3087 }, { "epoch": 3.53, "grad_norm": 1.3825169856842991, "learning_rate": 0.00012620780243014661, "loss": 4.3136, "step": 3088 }, { "epoch": 3.53, "grad_norm": 1.0878809705456396, "learning_rate": 0.00012602679367864264, "loss": 4.1826, "step": 3089 }, { "epoch": 3.53, "grad_norm": 0.9086193017497569, "learning_rate": 0.0001258458803024633, "loss": 4.3718, "step": 3090 }, { "epoch": 3.53, "grad_norm": 1.2267699920465889, "learning_rate": 0.00012566506240078873, "loss": 
4.3077, "step": 3091 }, { "epoch": 3.53, "grad_norm": 0.9264328262098767, "learning_rate": 0.00012548434007274664, "loss": 4.0587, "step": 3092 }, { "epoch": 3.53, "grad_norm": 0.7499985178392582, "learning_rate": 0.0001253037134174124, "loss": 4.2844, "step": 3093 }, { "epoch": 3.54, "grad_norm": 1.1674273928111123, "learning_rate": 0.00012512318253380875, "loss": 4.0244, "step": 3094 }, { "epoch": 3.54, "grad_norm": 0.9022225282894355, "learning_rate": 0.00012494274752090616, "loss": 4.3325, "step": 3095 }, { "epoch": 3.54, "grad_norm": 2.146768678380222, "learning_rate": 0.00012476240847762244, "loss": 4.3459, "step": 3096 }, { "epoch": 3.54, "grad_norm": 0.9810808689992113, "learning_rate": 0.00012458216550282277, "loss": 4.1667, "step": 3097 }, { "epoch": 3.54, "grad_norm": 6.0582922200120235, "learning_rate": 0.00012440201869531975, "loss": 4.3355, "step": 3098 }, { "epoch": 3.54, "grad_norm": 2.519643169606475, "learning_rate": 0.000124221968153873, "loss": 4.3129, "step": 3099 }, { "epoch": 3.54, "grad_norm": 2.3701151590782272, "learning_rate": 0.00012404201397718967, "loss": 4.2838, "step": 3100 }, { "epoch": 3.54, "eval_blimp_filtered_avg": 0.524179104477612, "eval_blimp_filtered_std": 0.0056781049766562505, "step": 3100 }, { "epoch": 3.54, "eval_blimp_supplement_avg": 0.49137931034482757, "eval_blimp_supplement_std": 0.022838162296649936, "step": 3100 }, { "epoch": 3.54, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 3100 }, { "epoch": 3.54, "eval_winoground_filtered_avg": 0.48, "eval_winoground_filtered_std": 0.05021167315686779, "step": 3100 }, { "epoch": 3.54, "grad_norm": 2.9375096741935365, "learning_rate": 0.00012386215626392384, "loss": 4.5588, "step": 3101 }, { "epoch": 3.55, "grad_norm": 4.378407301602557, "learning_rate": 0.00012368239511267715, "loss": 4.3454, "step": 3102 }, { "epoch": 3.55, "grad_norm": 1.089014409595611, "learning_rate": 0.00012350273062199766, "loss": 3.9342, "step": 3103 }, { "epoch": 3.55, "grad_norm": 1.1699366863977172, "learning_rate": 0.00012332316289038087, "loss": 4.1125, "step": 3104 }, { "epoch": 3.55, "grad_norm": 3.204872955934147, "learning_rate": 0.00012314369201626911, "loss": 4.2535, "step": 3105 }, { "epoch": 3.55, "grad_norm": 4.7205064960448455, "learning_rate": 0.00012296431809805182, "loss": 4.2734, "step": 3106 }, { "epoch": 3.55, "grad_norm": 3.202148509512982, "learning_rate": 0.00012278504123406478, "loss": 4.4724, "step": 3107 }, { "epoch": 3.55, "grad_norm": 2.7686944574745014, "learning_rate": 0.00012260586152259092, "loss": 4.5066, "step": 3108 }, { "epoch": 3.55, "grad_norm": 8.71885220518892, "learning_rate": 0.00012242677906186005, "loss": 4.243, "step": 3109 }, { "epoch": 3.55, "grad_norm": 2.7544285409264044, "learning_rate": 0.00012224779395004852, "loss": 4.2614, "step": 3110 }, { "epoch": 3.56, "grad_norm": 4.294749690811909, "learning_rate": 0.00012206890628527904, "loss": 4.436, "step": 3111 }, { "epoch": 3.56, "grad_norm": 3.7598462872058005, "learning_rate": 0.00012189011616562127, "loss": 4.5016, "step": 3112 }, { "epoch": 3.56, "grad_norm": 4.225818506964256, "learning_rate": 0.00012171142368909123, "loss": 4.2988, "step": 3113 }, { "epoch": 3.56, "grad_norm": 2.806252745533295, "learning_rate": 0.00012153282895365153, "loss": 4.405, "step": 3114 }, { "epoch": 3.56, "grad_norm": 3.067706310068835, "learning_rate": 0.00012135433205721077, "loss": 4.1811, "step": 3115 }, { "epoch": 3.56, "grad_norm": 1.1953221608599018, "learning_rate": 0.00012117593309762463, "loss": 
4.0753, "step": 3116 }, { "epoch": 3.56, "grad_norm": 2.186683538193241, "learning_rate": 0.00012099763217269456, "loss": 4.3185, "step": 3117 }, { "epoch": 3.56, "grad_norm": 1.9057106230099483, "learning_rate": 0.00012081942938016842, "loss": 4.3192, "step": 3118 }, { "epoch": 3.56, "grad_norm": 3.359578691851843, "learning_rate": 0.0001206413248177404, "loss": 4.2813, "step": 3119 }, { "epoch": 3.57, "grad_norm": 3.6819529150124284, "learning_rate": 0.00012046331858305047, "loss": 4.5102, "step": 3120 }, { "epoch": 3.57, "grad_norm": 1.667478221373015, "learning_rate": 0.00012028541077368506, "loss": 4.3778, "step": 3121 }, { "epoch": 3.57, "grad_norm": 2.014952398636443, "learning_rate": 0.00012010760148717653, "loss": 4.372, "step": 3122 }, { "epoch": 3.57, "grad_norm": 1.5770755840992674, "learning_rate": 0.0001199298908210032, "loss": 4.2297, "step": 3123 }, { "epoch": 3.57, "grad_norm": 1.266365585949548, "learning_rate": 0.00011975227887258933, "loss": 4.1576, "step": 3124 }, { "epoch": 3.57, "grad_norm": 2.9924150220920467, "learning_rate": 0.00011957476573930507, "loss": 4.1433, "step": 3125 }, { "epoch": 3.57, "grad_norm": 1.8245270224822032, "learning_rate": 0.00011939735151846642, "loss": 4.3717, "step": 3126 }, { "epoch": 3.57, "grad_norm": 1.694543978596632, "learning_rate": 0.00011922003630733522, "loss": 4.3395, "step": 3127 }, { "epoch": 3.57, "grad_norm": 2.447020811583496, "learning_rate": 0.00011904282020311871, "loss": 4.1883, "step": 3128 }, { "epoch": 3.58, "grad_norm": 1.0138835020090087, "learning_rate": 0.00011886570330297018, "loss": 4.4642, "step": 3129 }, { "epoch": 3.58, "grad_norm": 1.5319929330773685, "learning_rate": 0.00011868868570398838, "loss": 4.1568, "step": 3130 }, { "epoch": 3.58, "grad_norm": 1.0368096271825178, "learning_rate": 0.00011851176750321761, "loss": 4.3352, "step": 3131 }, { "epoch": 3.58, "grad_norm": 0.8418310586664867, "learning_rate": 0.00011833494879764769, "loss": 4.3116, "step": 3132 }, { "epoch": 3.58, "grad_norm": 1.4333606281546263, "learning_rate": 0.00011815822968421394, "loss": 4.1728, "step": 3133 }, { "epoch": 3.58, "grad_norm": 1.3765481568000348, "learning_rate": 0.00011798161025979698, "loss": 4.166, "step": 3134 }, { "epoch": 3.58, "grad_norm": 1.409694056079145, "learning_rate": 0.0001178050906212229, "loss": 4.1542, "step": 3135 }, { "epoch": 3.58, "grad_norm": 1.2859801934562816, "learning_rate": 0.00011762867086526309, "loss": 4.2194, "step": 3136 }, { "epoch": 3.59, "grad_norm": 1.0927998615615322, "learning_rate": 0.00011745235108863393, "loss": 4.1262, "step": 3137 }, { "epoch": 3.59, "grad_norm": 1.941342255091278, "learning_rate": 0.00011727613138799725, "loss": 4.3441, "step": 3138 }, { "epoch": 3.59, "grad_norm": 5.159742348676343, "learning_rate": 0.00011710001185996, "loss": 4.2176, "step": 3139 }, { "epoch": 3.59, "grad_norm": 1.7638751716054368, "learning_rate": 0.0001169239926010741, "loss": 4.141, "step": 3140 }, { "epoch": 3.59, "grad_norm": 2.5520958370345697, "learning_rate": 0.00011674807370783662, "loss": 4.2331, "step": 3141 }, { "epoch": 3.59, "grad_norm": 1.1683412045532688, "learning_rate": 0.00011657225527668947, "loss": 4.1658, "step": 3142 }, { "epoch": 3.59, "grad_norm": 1.8746378592857438, "learning_rate": 0.00011639653740401966, "loss": 4.0201, "step": 3143 }, { "epoch": 3.59, "grad_norm": 1.5440608451044426, "learning_rate": 0.00011622092018615895, "loss": 4.236, "step": 3144 }, { "epoch": 3.59, "grad_norm": 1.8732967379437504, "learning_rate": 0.00011604540371938386, "loss": 3.9769, 
"step": 3145 }, { "epoch": 3.6, "grad_norm": 2.442074961300071, "learning_rate": 0.0001158699880999158, "loss": 4.1153, "step": 3146 }, { "epoch": 3.6, "grad_norm": 0.9822758352214246, "learning_rate": 0.00011569467342392083, "loss": 4.2418, "step": 3147 }, { "epoch": 3.6, "grad_norm": 2.7592449958393845, "learning_rate": 0.00011551945978750975, "loss": 4.2548, "step": 3148 }, { "epoch": 3.6, "grad_norm": 1.1706133308532032, "learning_rate": 0.00011534434728673787, "loss": 4.2264, "step": 3149 }, { "epoch": 3.6, "grad_norm": 2.8029338713224283, "learning_rate": 0.00011516933601760513, "loss": 4.1473, "step": 3150 }, { "epoch": 3.6, "grad_norm": 2.222635934742633, "learning_rate": 0.00011499442607605588, "loss": 4.3166, "step": 3151 }, { "epoch": 3.6, "grad_norm": 0.885673349352131, "learning_rate": 0.00011481961755797905, "loss": 4.2234, "step": 3152 }, { "epoch": 3.6, "grad_norm": 1.3806666382794839, "learning_rate": 0.00011464491055920799, "loss": 4.222, "step": 3153 }, { "epoch": 3.6, "grad_norm": 0.9658888238027514, "learning_rate": 0.00011447030517552006, "loss": 4.0435, "step": 3154 }, { "epoch": 3.61, "grad_norm": 1.3749139340789496, "learning_rate": 0.00011429580150263716, "loss": 4.2326, "step": 3155 }, { "epoch": 3.61, "grad_norm": 0.695970526585966, "learning_rate": 0.00011412139963622567, "loss": 4.272, "step": 3156 }, { "epoch": 3.61, "grad_norm": 1.1535700634350357, "learning_rate": 0.00011394709967189588, "loss": 4.1741, "step": 3157 }, { "epoch": 3.61, "grad_norm": 7.115065204279877, "learning_rate": 0.00011377290170520206, "loss": 4.0572, "step": 3158 }, { "epoch": 3.61, "grad_norm": 1.4388851785556964, "learning_rate": 0.00011359880583164287, "loss": 4.2236, "step": 3159 }, { "epoch": 3.61, "grad_norm": 3.0934226093900263, "learning_rate": 0.00011342481214666084, "loss": 4.1748, "step": 3160 }, { "epoch": 3.61, "grad_norm": 2.7281681461147604, "learning_rate": 0.00011325092074564264, "loss": 4.2886, "step": 3161 }, { "epoch": 3.61, "grad_norm": 2.542614991541466, "learning_rate": 0.00011307713172391848, "loss": 4.1472, "step": 3162 }, { "epoch": 3.61, "grad_norm": 2.2095345612314774, "learning_rate": 0.00011290344517676295, "loss": 4.3034, "step": 3163 }, { "epoch": 3.62, "grad_norm": 1.2965357461673255, "learning_rate": 0.00011272986119939418, "loss": 4.0354, "step": 3164 }, { "epoch": 3.62, "grad_norm": 0.963141755544075, "learning_rate": 0.0001125563798869742, "loss": 4.1113, "step": 3165 }, { "epoch": 3.62, "grad_norm": 1.789941778426034, "learning_rate": 0.00011238300133460845, "loss": 4.2851, "step": 3166 }, { "epoch": 3.62, "grad_norm": 3.002049566007964, "learning_rate": 0.00011220972563734632, "loss": 4.2174, "step": 3167 }, { "epoch": 3.62, "grad_norm": 1.7274097260894765, "learning_rate": 0.00011203655289018086, "loss": 4.1929, "step": 3168 }, { "epoch": 3.62, "grad_norm": 2.106778316585251, "learning_rate": 0.00011186348318804845, "loss": 4.2726, "step": 3169 }, { "epoch": 3.62, "grad_norm": 2.8378010754076683, "learning_rate": 0.00011169051662582916, "loss": 4.1998, "step": 3170 }, { "epoch": 3.62, "grad_norm": 2.1867282485230852, "learning_rate": 0.00011151765329834646, "loss": 4.1752, "step": 3171 }, { "epoch": 3.63, "grad_norm": 1.4948749714851985, "learning_rate": 0.00011134489330036719, "loss": 4.3977, "step": 3172 }, { "epoch": 3.63, "grad_norm": 1.9128013797905492, "learning_rate": 0.00011117223672660158, "loss": 4.1496, "step": 3173 }, { "epoch": 3.63, "grad_norm": 1.656615464499988, "learning_rate": 0.00011099968367170323, "loss": 4.2879, "step": 
3174 }, { "epoch": 3.63, "grad_norm": 1.416655072767785, "learning_rate": 0.00011082723423026874, "loss": 4.0094, "step": 3175 }, { "epoch": 3.63, "grad_norm": 2.5893064361488327, "learning_rate": 0.00011065488849683818, "loss": 4.2027, "step": 3176 }, { "epoch": 3.63, "grad_norm": 2.2537767036179863, "learning_rate": 0.00011048264656589465, "loss": 4.1892, "step": 3177 }, { "epoch": 3.63, "grad_norm": 1.0060924162646234, "learning_rate": 0.00011031050853186438, "loss": 4.0344, "step": 3178 }, { "epoch": 3.63, "grad_norm": 1.3366283840610342, "learning_rate": 0.0001101384744891166, "loss": 4.3908, "step": 3179 }, { "epoch": 3.63, "grad_norm": 0.8687553511873695, "learning_rate": 0.00010996654453196356, "loss": 4.1052, "step": 3180 }, { "epoch": 3.64, "grad_norm": 1.1887440945028214, "learning_rate": 0.0001097947187546604, "loss": 4.1931, "step": 3181 }, { "epoch": 3.64, "grad_norm": 1.3985423700493347, "learning_rate": 0.00010962299725140534, "loss": 4.2072, "step": 3182 }, { "epoch": 3.64, "grad_norm": 1.8968060361497714, "learning_rate": 0.00010945138011633903, "loss": 4.3608, "step": 3183 }, { "epoch": 3.64, "grad_norm": 1.857585430734205, "learning_rate": 0.00010927986744354533, "loss": 4.1073, "step": 3184 }, { "epoch": 3.64, "grad_norm": 1.0413404818229504, "learning_rate": 0.00010910845932705054, "loss": 4.1282, "step": 3185 }, { "epoch": 3.64, "grad_norm": 0.7905769906002212, "learning_rate": 0.00010893715586082387, "loss": 4.1826, "step": 3186 }, { "epoch": 3.64, "grad_norm": 0.6463764620123684, "learning_rate": 0.00010876595713877697, "loss": 4.061, "step": 3187 }, { "epoch": 3.64, "grad_norm": 0.8462381259341787, "learning_rate": 0.00010859486325476415, "loss": 4.1246, "step": 3188 }, { "epoch": 3.64, "grad_norm": 1.5099689144825883, "learning_rate": 0.00010842387430258225, "loss": 4.011, "step": 3189 }, { "epoch": 3.65, "grad_norm": 2.1441839036541523, "learning_rate": 0.00010825299037597061, "loss": 4.25, "step": 3190 }, { "epoch": 3.65, "grad_norm": 0.9719093985140902, "learning_rate": 0.00010808221156861099, "loss": 4.2564, "step": 3191 }, { "epoch": 3.65, "grad_norm": 1.68468448544817, "learning_rate": 0.00010791153797412731, "loss": 4.1785, "step": 3192 }, { "epoch": 3.65, "grad_norm": 1.6631575954279587, "learning_rate": 0.00010774096968608607, "loss": 4.3495, "step": 3193 }, { "epoch": 3.65, "grad_norm": 0.8665137587518054, "learning_rate": 0.0001075705067979959, "loss": 4.1427, "step": 3194 }, { "epoch": 3.65, "grad_norm": 2.7799527670244935, "learning_rate": 0.00010740014940330798, "loss": 4.1706, "step": 3195 }, { "epoch": 3.65, "grad_norm": 2.6702246367087508, "learning_rate": 0.00010722989759541505, "loss": 4.2128, "step": 3196 }, { "epoch": 3.65, "grad_norm": 0.758468281368008, "learning_rate": 0.00010705975146765246, "loss": 3.9807, "step": 3197 }, { "epoch": 3.65, "grad_norm": 1.255780622128858, "learning_rate": 0.00010688971111329741, "loss": 4.1501, "step": 3198 }, { "epoch": 3.66, "grad_norm": 1.1504997281648268, "learning_rate": 0.00010671977662556928, "loss": 4.0531, "step": 3199 }, { "epoch": 3.66, "grad_norm": 1.089672354865873, "learning_rate": 0.00010654994809762908, "loss": 4.1766, "step": 3200 }, { "epoch": 3.66, "eval_blimp_filtered_avg": 0.5319402985074627, "eval_blimp_filtered_std": 0.005633106409838074, "step": 3200 }, { "epoch": 3.66, "eval_blimp_supplement_avg": 0.4525862068965517, "eval_blimp_supplement_std": 0.022746037902290208, "step": 3200 }, { "epoch": 3.66, "eval_vqa_filtered_avg": 0.35, "eval_vqa_filtered_std": 0.0479372485441102, "step": 
3200 }, { "epoch": 3.66, "eval_winoground_filtered_avg": 0.49, "eval_winoground_filtered_std": 0.05024183937956912, "step": 3200 }, { "epoch": 3.66, "grad_norm": 17.624596657582504, "learning_rate": 0.00010638022562258005, "loss": 3.9853, "step": 3201 }, { "epoch": 3.66, "grad_norm": 1.6263463493549037, "learning_rate": 0.0001062106092934671, "loss": 4.097, "step": 3202 }, { "epoch": 3.66, "grad_norm": 3.209920700115739, "learning_rate": 0.0001060410992032773, "loss": 4.2575, "step": 3203 }, { "epoch": 3.66, "grad_norm": 2.783076178958525, "learning_rate": 0.00010587169544493896, "loss": 4.2889, "step": 3204 }, { "epoch": 3.66, "grad_norm": 2.996439327742222, "learning_rate": 0.00010570239811132243, "loss": 3.9889, "step": 3205 }, { "epoch": 3.66, "grad_norm": 2.0579089603522176, "learning_rate": 0.00010553320729523962, "loss": 4.2335, "step": 3206 }, { "epoch": 3.67, "grad_norm": 4.133424724314067, "learning_rate": 0.00010536412308944408, "loss": 4.2679, "step": 3207 }, { "epoch": 3.67, "grad_norm": 1.1283417793065755, "learning_rate": 0.00010519514558663098, "loss": 4.1976, "step": 3208 }, { "epoch": 3.67, "grad_norm": 2.988478409147808, "learning_rate": 0.0001050262748794366, "loss": 3.9076, "step": 3209 }, { "epoch": 3.67, "grad_norm": 1.7153340804430834, "learning_rate": 0.00010485751106043932, "loss": 4.2141, "step": 3210 }, { "epoch": 3.67, "grad_norm": 1.3737954092140663, "learning_rate": 0.00010468885422215845, "loss": 4.2177, "step": 3211 }, { "epoch": 3.67, "grad_norm": 14.555643417005202, "learning_rate": 0.00010452030445705487, "loss": 4.0235, "step": 3212 }, { "epoch": 3.67, "grad_norm": 1.4220353288260394, "learning_rate": 0.00010435186185753051, "loss": 4.0247, "step": 3213 }, { "epoch": 3.67, "grad_norm": 1.3890010121995073, "learning_rate": 0.00010418352651592879, "loss": 4.2261, "step": 3214 }, { "epoch": 3.67, "grad_norm": 2.4361133666767323, "learning_rate": 0.00010401529852453425, "loss": 4.1769, "step": 3215 }, { "epoch": 3.68, "grad_norm": 2.9002874396660743, "learning_rate": 0.0001038471779755727, "loss": 4.1125, "step": 3216 }, { "epoch": 3.68, "grad_norm": 2.557519639292111, "learning_rate": 0.00010367916496121065, "loss": 4.0642, "step": 3217 }, { "epoch": 3.68, "grad_norm": 1.3727300775540912, "learning_rate": 0.00010351125957355618, "loss": 4.1876, "step": 3218 }, { "epoch": 3.68, "grad_norm": 1.1193755135877852, "learning_rate": 0.00010334346190465808, "loss": 4.0663, "step": 3219 }, { "epoch": 3.68, "grad_norm": 1.1391317213768857, "learning_rate": 0.00010317577204650618, "loss": 4.13, "step": 3220 }, { "epoch": 3.68, "grad_norm": 1.3626984297008808, "learning_rate": 0.00010300819009103097, "loss": 4.1271, "step": 3221 }, { "epoch": 3.68, "grad_norm": 1.2064215194048413, "learning_rate": 0.00010284071613010411, "loss": 4.1464, "step": 3222 }, { "epoch": 3.68, "grad_norm": 1.6506468988556247, "learning_rate": 0.0001026733502555379, "loss": 4.2576, "step": 3223 }, { "epoch": 3.68, "grad_norm": 2.4408219361705012, "learning_rate": 0.0001025060925590854, "loss": 4.3597, "step": 3224 }, { "epoch": 3.69, "grad_norm": 2.975816973957695, "learning_rate": 0.00010233894313244034, "loss": 4.2039, "step": 3225 }, { "epoch": 3.69, "grad_norm": 1.6923327159694936, "learning_rate": 0.00010217190206723715, "loss": 4.1259, "step": 3226 }, { "epoch": 3.69, "grad_norm": 1.75617780631274, "learning_rate": 0.00010200496945505084, "loss": 4.2911, "step": 3227 }, { "epoch": 3.69, "grad_norm": 1.8381129235319367, "learning_rate": 0.00010183814538739694, "loss": 4.0165, "step": 3228 
}, { "epoch": 3.69, "grad_norm": 1.4280732725291616, "learning_rate": 0.00010167142995573154, "loss": 4.1849, "step": 3229 }, { "epoch": 3.69, "grad_norm": 1.4982453256081794, "learning_rate": 0.000101504823251451, "loss": 4.1591, "step": 3230 }, { "epoch": 3.69, "grad_norm": 3.7469732626833023, "learning_rate": 0.00010133832536589224, "loss": 4.2803, "step": 3231 }, { "epoch": 3.69, "grad_norm": 1.1529116702576152, "learning_rate": 0.00010117193639033249, "loss": 4.0944, "step": 3232 }, { "epoch": 3.69, "grad_norm": 1.1672656585798726, "learning_rate": 0.00010100565641598926, "loss": 4.0324, "step": 3233 }, { "epoch": 3.7, "grad_norm": 0.9166189043871061, "learning_rate": 0.0001008394855340203, "loss": 4.3547, "step": 3234 }, { "epoch": 3.7, "grad_norm": 0.9693910228231015, "learning_rate": 0.00010067342383552357, "loss": 4.0933, "step": 3235 }, { "epoch": 3.7, "grad_norm": 1.3116432040666297, "learning_rate": 0.0001005074714115371, "loss": 4.1041, "step": 3236 }, { "epoch": 3.7, "grad_norm": 1.7211312162696133, "learning_rate": 0.00010034162835303927, "loss": 4.3542, "step": 3237 }, { "epoch": 3.7, "grad_norm": 1.462357344516288, "learning_rate": 0.00010017589475094801, "loss": 4.1624, "step": 3238 }, { "epoch": 3.7, "grad_norm": 1.0650186947599927, "learning_rate": 0.00010001027069612171, "loss": 4.3794, "step": 3239 }, { "epoch": 3.7, "grad_norm": 3.207204542131583, "learning_rate": 9.984475627935852e-05, "loss": 4.1557, "step": 3240 }, { "epoch": 3.7, "grad_norm": 1.937017961251534, "learning_rate": 9.96793515913964e-05, "loss": 3.9887, "step": 3241 }, { "epoch": 3.71, "grad_norm": 1.0408674193195766, "learning_rate": 9.951405672291355e-05, "loss": 4.1979, "step": 3242 }, { "epoch": 3.71, "grad_norm": 1.223234740869277, "learning_rate": 9.934887176452736e-05, "loss": 4.2106, "step": 3243 }, { "epoch": 3.71, "grad_norm": 1.3700182092203745, "learning_rate": 9.918379680679539e-05, "loss": 4.2406, "step": 3244 }, { "epoch": 3.71, "grad_norm": 1.5267077243572735, "learning_rate": 9.901883194021478e-05, "loss": 4.288, "step": 3245 }, { "epoch": 3.71, "grad_norm": 0.9884526486185409, "learning_rate": 9.88539772552224e-05, "loss": 4.021, "step": 3246 }, { "epoch": 3.71, "grad_norm": 1.0788574716826151, "learning_rate": 9.868923284219444e-05, "loss": 4.2422, "step": 3247 }, { "epoch": 3.71, "grad_norm": 1.4943213159696211, "learning_rate": 9.852459879144692e-05, "loss": 4.1849, "step": 3248 }, { "epoch": 3.71, "grad_norm": 0.9806223802360323, "learning_rate": 9.836007519323522e-05, "loss": 4.4687, "step": 3249 }, { "epoch": 3.71, "grad_norm": 3.269250434137489, "learning_rate": 9.819566213775443e-05, "loss": 4.0501, "step": 3250 }, { "epoch": 3.72, "grad_norm": 1.6761704985054842, "learning_rate": 9.80313597151386e-05, "loss": 4.1408, "step": 3251 }, { "epoch": 3.72, "grad_norm": 1.5472677601860343, "learning_rate": 9.786716801546142e-05, "loss": 4.0619, "step": 3252 }, { "epoch": 3.72, "grad_norm": 1.1358888897344144, "learning_rate": 9.770308712873581e-05, "loss": 4.3279, "step": 3253 }, { "epoch": 3.72, "grad_norm": 1.480067135718931, "learning_rate": 9.753911714491404e-05, "loss": 4.3261, "step": 3254 }, { "epoch": 3.72, "grad_norm": 1.0811280153870966, "learning_rate": 9.737525815388729e-05, "loss": 4.0897, "step": 3255 }, { "epoch": 3.72, "grad_norm": 1.0434933165104079, "learning_rate": 9.721151024548611e-05, "loss": 4.0344, "step": 3256 }, { "epoch": 3.72, "grad_norm": 2.185854350221175, "learning_rate": 9.704787350948029e-05, "loss": 4.2038, "step": 3257 }, { "epoch": 3.72, 
"grad_norm": 0.9744530113388489, "learning_rate": 9.68843480355784e-05, "loss": 4.1134, "step": 3258 }, { "epoch": 3.72, "grad_norm": 0.8843149093418575, "learning_rate": 9.672093391342825e-05, "loss": 4.2737, "step": 3259 }, { "epoch": 3.73, "grad_norm": 0.8910872823814119, "learning_rate": 9.655763123261631e-05, "loss": 3.9506, "step": 3260 }, { "epoch": 3.73, "grad_norm": 5.2844768925124335, "learning_rate": 9.639444008266817e-05, "loss": 4.1122, "step": 3261 }, { "epoch": 3.73, "grad_norm": 1.2949832238663532, "learning_rate": 9.623136055304828e-05, "loss": 4.1638, "step": 3262 }, { "epoch": 3.73, "grad_norm": 1.7135253741172285, "learning_rate": 9.606839273315978e-05, "loss": 4.4198, "step": 3263 }, { "epoch": 3.73, "grad_norm": 1.2490755765761812, "learning_rate": 9.590553671234472e-05, "loss": 4.2548, "step": 3264 }, { "epoch": 3.73, "grad_norm": 2.0797117499592335, "learning_rate": 9.574279257988374e-05, "loss": 4.0479, "step": 3265 }, { "epoch": 3.73, "grad_norm": 2.2804598360879362, "learning_rate": 9.558016042499618e-05, "loss": 3.988, "step": 3266 }, { "epoch": 3.73, "grad_norm": 1.2480021694670407, "learning_rate": 9.541764033684007e-05, "loss": 4.161, "step": 3267 }, { "epoch": 3.73, "grad_norm": 1.2354616235579359, "learning_rate": 9.525523240451175e-05, "loss": 4.031, "step": 3268 }, { "epoch": 3.74, "grad_norm": 1.4343356140305725, "learning_rate": 9.509293671704638e-05, "loss": 4.2028, "step": 3269 }, { "epoch": 3.74, "grad_norm": 0.956698078951955, "learning_rate": 9.493075336341737e-05, "loss": 4.424, "step": 3270 }, { "epoch": 3.74, "grad_norm": 2.0366110761678033, "learning_rate": 9.476868243253671e-05, "loss": 4.3983, "step": 3271 }, { "epoch": 3.74, "grad_norm": 1.785195998698921, "learning_rate": 9.460672401325466e-05, "loss": 4.2526, "step": 3272 }, { "epoch": 3.74, "grad_norm": 1.3155356385914907, "learning_rate": 9.444487819435977e-05, "loss": 4.1495, "step": 3273 }, { "epoch": 3.74, "grad_norm": 1.056860310659244, "learning_rate": 9.428314506457894e-05, "loss": 4.3833, "step": 3274 }, { "epoch": 3.74, "grad_norm": 1.1653591407948571, "learning_rate": 9.412152471257728e-05, "loss": 4.2599, "step": 3275 }, { "epoch": 3.74, "grad_norm": 1.8286454115353377, "learning_rate": 9.396001722695809e-05, "loss": 4.2253, "step": 3276 }, { "epoch": 3.75, "grad_norm": 1.5861643368028653, "learning_rate": 9.379862269626262e-05, "loss": 4.1681, "step": 3277 }, { "epoch": 3.75, "grad_norm": 1.091196265825571, "learning_rate": 9.363734120897043e-05, "loss": 4.3002, "step": 3278 }, { "epoch": 3.75, "grad_norm": 1.3100802908434868, "learning_rate": 9.347617285349894e-05, "loss": 4.2181, "step": 3279 }, { "epoch": 3.75, "grad_norm": 1.2825737921492437, "learning_rate": 9.331511771820368e-05, "loss": 4.1342, "step": 3280 }, { "epoch": 3.75, "grad_norm": 1.616139149561849, "learning_rate": 9.315417589137805e-05, "loss": 4.2311, "step": 3281 }, { "epoch": 3.75, "grad_norm": 1.2390516585918243, "learning_rate": 9.299334746125333e-05, "loss": 4.0458, "step": 3282 }, { "epoch": 3.75, "grad_norm": 1.6710136589694056, "learning_rate": 9.283263251599861e-05, "loss": 4.1816, "step": 3283 }, { "epoch": 3.75, "grad_norm": 1.5045541227550596, "learning_rate": 9.26720311437209e-05, "loss": 3.975, "step": 3284 }, { "epoch": 3.75, "grad_norm": 1.256729370826007, "learning_rate": 9.251154343246463e-05, "loss": 4.1423, "step": 3285 }, { "epoch": 3.76, "grad_norm": 1.1622868091854917, "learning_rate": 9.235116947021225e-05, "loss": 4.336, "step": 3286 }, { "epoch": 3.76, "grad_norm": 
2.160335785602276, "learning_rate": 9.219090934488372e-05, "loss": 4.1452, "step": 3287 }, { "epoch": 3.76, "grad_norm": 1.4205535958718132, "learning_rate": 9.203076314433661e-05, "loss": 4.208, "step": 3288 }, { "epoch": 3.76, "grad_norm": 1.5322958624443106, "learning_rate": 9.1870730956366e-05, "loss": 4.2827, "step": 3289 }, { "epoch": 3.76, "grad_norm": 2.1817129910339945, "learning_rate": 9.171081286870453e-05, "loss": 4.1515, "step": 3290 }, { "epoch": 3.76, "grad_norm": 1.7385081501840363, "learning_rate": 9.15510089690222e-05, "loss": 4.1317, "step": 3291 }, { "epoch": 3.76, "grad_norm": 1.6654703054418087, "learning_rate": 9.13913193449266e-05, "loss": 4.0178, "step": 3292 }, { "epoch": 3.76, "grad_norm": 1.2069041400896545, "learning_rate": 9.123174408396229e-05, "loss": 4.2794, "step": 3293 }, { "epoch": 3.76, "grad_norm": 2.015210008039759, "learning_rate": 9.10722832736115e-05, "loss": 4.0029, "step": 3294 }, { "epoch": 3.77, "grad_norm": 0.9164855033969092, "learning_rate": 9.091293700129361e-05, "loss": 4.0899, "step": 3295 }, { "epoch": 3.77, "grad_norm": 1.1380643386489189, "learning_rate": 9.075370535436506e-05, "loss": 4.1216, "step": 3296 }, { "epoch": 3.77, "grad_norm": 0.9514335143760437, "learning_rate": 9.05945884201199e-05, "loss": 4.2012, "step": 3297 }, { "epoch": 3.77, "grad_norm": 1.1594640738818303, "learning_rate": 9.043558628578865e-05, "loss": 4.0528, "step": 3298 }, { "epoch": 3.77, "grad_norm": 2.2595121842179187, "learning_rate": 9.027669903853931e-05, "loss": 4.2702, "step": 3299 }, { "epoch": 3.77, "grad_norm": 1.2534810818327904, "learning_rate": 9.011792676547684e-05, "loss": 4.0686, "step": 3300 }, { "epoch": 3.77, "eval_blimp_filtered_avg": 0.5229850746268657, "eval_blimp_filtered_std": 0.005626151609407658, "step": 3300 }, { "epoch": 3.77, "eval_blimp_supplement_avg": 0.4504310344827586, "eval_blimp_supplement_std": 0.022740295965290036, "step": 3300 }, { "epoch": 3.77, "eval_vqa_filtered_avg": 0.34, "eval_vqa_filtered_std": 0.047609522856952365, "step": 3300 }, { "epoch": 3.77, "eval_winoground_filtered_avg": 0.5, "eval_winoground_filtered_std": 0.050251890762960605, "step": 3300 }, { "epoch": 3.77, "grad_norm": 1.0518850399469923, "learning_rate": 8.995926955364321e-05, "loss": 4.2433, "step": 3301 }, { "epoch": 3.77, "grad_norm": 1.0509857439020014, "learning_rate": 8.980072749001705e-05, "loss": 4.117, "step": 3302 }, { "epoch": 3.77, "grad_norm": 1.8478959003575024, "learning_rate": 8.964230066151402e-05, "loss": 4.1293, "step": 3303 }, { "epoch": 3.78, "grad_norm": 1.109300122773905, "learning_rate": 8.948398915498687e-05, "loss": 3.9388, "step": 3304 }, { "epoch": 3.78, "grad_norm": 1.1520503153024664, "learning_rate": 8.932579305722483e-05, "loss": 4.0455, "step": 3305 }, { "epoch": 3.78, "grad_norm": 0.895551270095727, "learning_rate": 8.916771245495378e-05, "loss": 4.0789, "step": 3306 }, { "epoch": 3.78, "grad_norm": 2.1332757130979663, "learning_rate": 8.900974743483653e-05, "loss": 4.1813, "step": 3307 }, { "epoch": 3.78, "grad_norm": 1.1609411492499668, "learning_rate": 8.885189808347236e-05, "loss": 4.1828, "step": 3308 }, { "epoch": 3.78, "grad_norm": 1.4489678159425914, "learning_rate": 8.869416448739738e-05, "loss": 4.12, "step": 3309 }, { "epoch": 3.78, "grad_norm": 1.4367374373064303, "learning_rate": 8.853654673308369e-05, "loss": 4.1435, "step": 3310 }, { "epoch": 3.78, "grad_norm": 1.2100124306485411, "learning_rate": 8.837904490694058e-05, "loss": 3.9043, "step": 3311 }, { "epoch": 3.79, "grad_norm": 1.2966200936585779, 
"learning_rate": 8.822165909531337e-05, "loss": 4.0811, "step": 3312 }, { "epoch": 3.79, "grad_norm": 1.1941675155615084, "learning_rate": 8.806438938448387e-05, "loss": 4.1644, "step": 3313 }, { "epoch": 3.79, "grad_norm": 1.1219675197471404, "learning_rate": 8.790723586067035e-05, "loss": 4.0698, "step": 3314 }, { "epoch": 3.79, "grad_norm": 1.5081193398970874, "learning_rate": 8.775019861002702e-05, "loss": 4.0178, "step": 3315 }, { "epoch": 3.79, "grad_norm": 1.5457141789824853, "learning_rate": 8.759327771864474e-05, "loss": 4.1889, "step": 3316 }, { "epoch": 3.79, "grad_norm": 1.380264371571982, "learning_rate": 8.743647327255044e-05, "loss": 3.9785, "step": 3317 }, { "epoch": 3.79, "grad_norm": 1.5035487046486857, "learning_rate": 8.727978535770723e-05, "loss": 4.245, "step": 3318 }, { "epoch": 3.79, "grad_norm": 1.5052412096108791, "learning_rate": 8.712321406001429e-05, "loss": 4.2113, "step": 3319 }, { "epoch": 3.79, "grad_norm": 1.2145066584937283, "learning_rate": 8.69667594653069e-05, "loss": 3.9557, "step": 3320 }, { "epoch": 3.8, "grad_norm": 1.3810731096633102, "learning_rate": 8.681042165935639e-05, "loss": 4.0995, "step": 3321 }, { "epoch": 3.8, "grad_norm": 2.52899171534432, "learning_rate": 8.665420072787006e-05, "loss": 4.1049, "step": 3322 }, { "epoch": 3.8, "grad_norm": 1.2048700129434262, "learning_rate": 8.6498096756491e-05, "loss": 4.0844, "step": 3323 }, { "epoch": 3.8, "grad_norm": 1.9201501953325104, "learning_rate": 8.634210983079831e-05, "loss": 4.0, "step": 3324 }, { "epoch": 3.8, "grad_norm": 1.4299876860595098, "learning_rate": 8.618624003630697e-05, "loss": 4.1994, "step": 3325 }, { "epoch": 3.8, "grad_norm": 2.186811082666166, "learning_rate": 8.603048745846763e-05, "loss": 4.1379, "step": 3326 }, { "epoch": 3.8, "grad_norm": 1.3239673438426975, "learning_rate": 8.587485218266678e-05, "loss": 4.3094, "step": 3327 }, { "epoch": 3.8, "grad_norm": 2.2924551137383666, "learning_rate": 8.571933429422651e-05, "loss": 4.4288, "step": 3328 }, { "epoch": 3.8, "grad_norm": 1.9953091235208018, "learning_rate": 8.556393387840459e-05, "loss": 4.0379, "step": 3329 }, { "epoch": 3.81, "grad_norm": 2.789892874428047, "learning_rate": 8.540865102039446e-05, "loss": 4.1444, "step": 3330 }, { "epoch": 3.81, "grad_norm": 2.0718610060641565, "learning_rate": 8.525348580532505e-05, "loss": 4.1718, "step": 3331 }, { "epoch": 3.81, "grad_norm": 1.3370701933465095, "learning_rate": 8.509843831826072e-05, "loss": 4.2672, "step": 3332 }, { "epoch": 3.81, "grad_norm": 2.4978867507769316, "learning_rate": 8.494350864420137e-05, "loss": 4.0762, "step": 3333 }, { "epoch": 3.81, "grad_norm": 1.1861386484406944, "learning_rate": 8.478869686808234e-05, "loss": 4.2148, "step": 3334 }, { "epoch": 3.81, "grad_norm": 1.7387935642305796, "learning_rate": 8.463400307477433e-05, "loss": 4.0148, "step": 3335 }, { "epoch": 3.81, "grad_norm": 1.7661508322279906, "learning_rate": 8.447942734908326e-05, "loss": 4.0503, "step": 3336 }, { "epoch": 3.81, "grad_norm": 2.8985263223264894, "learning_rate": 8.43249697757505e-05, "loss": 4.4129, "step": 3337 }, { "epoch": 3.81, "grad_norm": 1.745210864212912, "learning_rate": 8.417063043945246e-05, "loss": 4.1317, "step": 3338 }, { "epoch": 3.82, "grad_norm": 1.335980176248465, "learning_rate": 8.401640942480096e-05, "loss": 3.9982, "step": 3339 }, { "epoch": 3.82, "grad_norm": 1.4163264696432207, "learning_rate": 8.38623068163426e-05, "loss": 4.1394, "step": 3340 }, { "epoch": 3.82, "grad_norm": 1.8563595317017516, "learning_rate": 8.370832269855935e-05, 
"loss": 4.1426, "step": 3341 }, { "epoch": 3.82, "grad_norm": 2.3596455334181647, "learning_rate": 8.355445715586821e-05, "loss": 4.1113, "step": 3342 }, { "epoch": 3.82, "grad_norm": 1.3091047882021514, "learning_rate": 8.340071027262105e-05, "loss": 4.167, "step": 3343 }, { "epoch": 3.82, "grad_norm": 1.7401030820387218, "learning_rate": 8.324708213310474e-05, "loss": 4.2679, "step": 3344 }, { "epoch": 3.82, "grad_norm": 1.1252546817407043, "learning_rate": 8.309357282154112e-05, "loss": 3.9514, "step": 3345 }, { "epoch": 3.82, "grad_norm": 1.4927864410503962, "learning_rate": 8.294018242208682e-05, "loss": 4.0077, "step": 3346 }, { "epoch": 3.83, "grad_norm": 3.60887229490552, "learning_rate": 8.278691101883328e-05, "loss": 4.3733, "step": 3347 }, { "epoch": 3.83, "grad_norm": 2.246449890851229, "learning_rate": 8.263375869580675e-05, "loss": 4.11, "step": 3348 }, { "epoch": 3.83, "grad_norm": 2.3579661327637096, "learning_rate": 8.248072553696808e-05, "loss": 4.2004, "step": 3349 }, { "epoch": 3.83, "grad_norm": 1.4487181363456263, "learning_rate": 8.23278116262128e-05, "loss": 4.1187, "step": 3350 }, { "epoch": 3.83, "grad_norm": 1.5196724204886165, "learning_rate": 8.217501704737135e-05, "loss": 4.0816, "step": 3351 }, { "epoch": 3.83, "grad_norm": 1.0694949856804647, "learning_rate": 8.202234188420855e-05, "loss": 4.1055, "step": 3352 }, { "epoch": 3.83, "grad_norm": 2.4708918732542076, "learning_rate": 8.186978622042352e-05, "loss": 4.3279, "step": 3353 }, { "epoch": 3.83, "grad_norm": 1.4407763788600658, "learning_rate": 8.17173501396502e-05, "loss": 4.0728, "step": 3354 }, { "epoch": 3.83, "grad_norm": 1.3887405088736797, "learning_rate": 8.156503372545693e-05, "loss": 4.2407, "step": 3355 }, { "epoch": 3.84, "grad_norm": 1.4420209082965683, "learning_rate": 8.141283706134637e-05, "loss": 4.1959, "step": 3356 }, { "epoch": 3.84, "grad_norm": 1.2150378240584478, "learning_rate": 8.126076023075535e-05, "loss": 4.3334, "step": 3357 }, { "epoch": 3.84, "grad_norm": 3.073322560711939, "learning_rate": 8.110880331705541e-05, "loss": 4.1578, "step": 3358 }, { "epoch": 3.84, "grad_norm": 1.2427332547998782, "learning_rate": 8.095696640355209e-05, "loss": 4.137, "step": 3359 }, { "epoch": 3.84, "grad_norm": 2.346414043082525, "learning_rate": 8.080524957348523e-05, "loss": 3.9984, "step": 3360 }, { "epoch": 3.84, "grad_norm": 1.778507204037714, "learning_rate": 8.06536529100287e-05, "loss": 4.142, "step": 3361 }, { "epoch": 3.84, "grad_norm": 1.2149825008595843, "learning_rate": 8.050217649629059e-05, "loss": 4.2951, "step": 3362 }, { "epoch": 3.84, "grad_norm": 1.538892449511593, "learning_rate": 8.035082041531312e-05, "loss": 4.029, "step": 3363 }, { "epoch": 3.84, "grad_norm": 1.9662110692401356, "learning_rate": 8.019958475007251e-05, "loss": 4.2029, "step": 3364 }, { "epoch": 3.85, "grad_norm": 1.6275817571829883, "learning_rate": 8.004846958347889e-05, "loss": 4.0334, "step": 3365 }, { "epoch": 3.85, "grad_norm": 1.6186152654939423, "learning_rate": 7.98974749983764e-05, "loss": 4.0779, "step": 3366 }, { "epoch": 3.85, "grad_norm": 1.5549236912061863, "learning_rate": 7.974660107754308e-05, "loss": 4.144, "step": 3367 }, { "epoch": 3.85, "grad_norm": 1.8173061154013437, "learning_rate": 7.959584790369078e-05, "loss": 3.9799, "step": 3368 }, { "epoch": 3.85, "grad_norm": 1.7511657868524069, "learning_rate": 7.944521555946525e-05, "loss": 4.0315, "step": 3369 }, { "epoch": 3.85, "grad_norm": 1.4861240778857125, "learning_rate": 7.929470412744579e-05, "loss": 4.1782, "step": 3370 }, { 
"epoch": 3.85, "grad_norm": 2.254123818750728, "learning_rate": 7.914431369014562e-05, "loss": 4.0544, "step": 3371 }, { "epoch": 3.85, "grad_norm": 2.414319803813102, "learning_rate": 7.899404433001154e-05, "loss": 3.8913, "step": 3372 }, { "epoch": 3.85, "grad_norm": 3.4859679339262124, "learning_rate": 7.884389612942399e-05, "loss": 4.0949, "step": 3373 }, { "epoch": 3.86, "grad_norm": 2.2079682893138184, "learning_rate": 7.869386917069699e-05, "loss": 4.2035, "step": 3374 }, { "epoch": 3.86, "grad_norm": 2.9409598737506397, "learning_rate": 7.854396353607811e-05, "loss": 4.0075, "step": 3375 }, { "epoch": 3.86, "grad_norm": 3.1761597778366055, "learning_rate": 7.839417930774836e-05, "loss": 4.019, "step": 3376 }, { "epoch": 3.86, "grad_norm": 1.6833246507199882, "learning_rate": 7.824451656782231e-05, "loss": 4.0529, "step": 3377 }, { "epoch": 3.86, "grad_norm": 5.64332792586859, "learning_rate": 7.809497539834766e-05, "loss": 4.2422, "step": 3378 }, { "epoch": 3.86, "grad_norm": 1.4092665455236553, "learning_rate": 7.794555588130575e-05, "loss": 4.0162, "step": 3379 }, { "epoch": 3.86, "grad_norm": 1.9097182089230074, "learning_rate": 7.779625809861114e-05, "loss": 3.9794, "step": 3380 }, { "epoch": 3.86, "grad_norm": 2.384410809231374, "learning_rate": 7.764708213211156e-05, "loss": 4.1199, "step": 3381 }, { "epoch": 3.87, "grad_norm": 1.7043668772202547, "learning_rate": 7.749802806358806e-05, "loss": 4.338, "step": 3382 }, { "epoch": 3.87, "grad_norm": 2.3790667133190353, "learning_rate": 7.734909597475487e-05, "loss": 4.1018, "step": 3383 }, { "epoch": 3.87, "grad_norm": 2.3448952554154694, "learning_rate": 7.720028594725929e-05, "loss": 4.2448, "step": 3384 }, { "epoch": 3.87, "grad_norm": 1.3004885138203928, "learning_rate": 7.705159806268169e-05, "loss": 4.1088, "step": 3385 }, { "epoch": 3.87, "grad_norm": 1.844620391872864, "learning_rate": 7.690303240253563e-05, "loss": 4.1142, "step": 3386 }, { "epoch": 3.87, "grad_norm": 2.068357525581232, "learning_rate": 7.675458904826739e-05, "loss": 4.041, "step": 3387 }, { "epoch": 3.87, "grad_norm": 3.7256970080300156, "learning_rate": 7.660626808125642e-05, "loss": 3.8905, "step": 3388 }, { "epoch": 3.87, "grad_norm": 2.548347864090204, "learning_rate": 7.645806958281497e-05, "loss": 4.2503, "step": 3389 }, { "epoch": 3.87, "grad_norm": 2.211094582740078, "learning_rate": 7.630999363418837e-05, "loss": 4.3927, "step": 3390 }, { "epoch": 3.88, "grad_norm": 3.0899305078334107, "learning_rate": 7.616204031655441e-05, "loss": 4.1496, "step": 3391 }, { "epoch": 3.88, "grad_norm": 1.6230703505809985, "learning_rate": 7.601420971102389e-05, "loss": 4.0591, "step": 3392 }, { "epoch": 3.88, "grad_norm": 1.494468773094374, "learning_rate": 7.586650189864029e-05, "loss": 4.2755, "step": 3393 }, { "epoch": 3.88, "grad_norm": 2.5228537110169174, "learning_rate": 7.571891696037979e-05, "loss": 3.9725, "step": 3394 }, { "epoch": 3.88, "grad_norm": 3.8015271475298165, "learning_rate": 7.557145497715109e-05, "loss": 4.2322, "step": 3395 }, { "epoch": 3.88, "grad_norm": 2.957810301426153, "learning_rate": 7.542411602979558e-05, "loss": 3.9961, "step": 3396 }, { "epoch": 3.88, "grad_norm": 2.5038145612528013, "learning_rate": 7.527690019908718e-05, "loss": 4.1258, "step": 3397 }, { "epoch": 3.88, "grad_norm": 1.729948682922274, "learning_rate": 7.512980756573253e-05, "loss": 3.9408, "step": 3398 }, { "epoch": 3.88, "grad_norm": 3.319429538393257, "learning_rate": 7.498283821037032e-05, "loss": 4.2339, "step": 3399 }, { "epoch": 3.89, "grad_norm": 
1.847789669201742, "learning_rate": 7.483599221357188e-05, "loss": 4.0012, "step": 3400 }, { "epoch": 3.89, "eval_blimp_filtered_avg": 0.5219402985074627, "eval_blimp_filtered_std": 0.005628011061357745, "step": 3400 }, { "epoch": 3.89, "eval_blimp_supplement_avg": 0.4482758620689655, "eval_blimp_supplement_std": 0.022533914102181193, "step": 3400 }, { "epoch": 3.89, "eval_vqa_filtered_avg": 0.36, "eval_vqa_filtered_std": 0.04824181513244218, "step": 3400 }, { "epoch": 3.89, "eval_winoground_filtered_avg": 0.5, "eval_winoground_filtered_std": 0.050251890762960605, "step": 3400 }, { "epoch": 3.89, "grad_norm": 3.470018721832849, "learning_rate": 7.468926965584098e-05, "loss": 4.1482, "step": 3401 }, { "epoch": 3.89, "grad_norm": 2.2397420095128404, "learning_rate": 7.454267061761357e-05, "loss": 4.2162, "step": 3402 }, { "epoch": 3.89, "grad_norm": 2.089243034152301, "learning_rate": 7.439619517925806e-05, "loss": 4.2122, "step": 3403 }, { "epoch": 3.89, "grad_norm": 2.504611289602091, "learning_rate": 7.424984342107472e-05, "loss": 4.0375, "step": 3404 }, { "epoch": 3.89, "grad_norm": 2.235982658376364, "learning_rate": 7.410361542329657e-05, "loss": 4.0465, "step": 3405 }, { "epoch": 3.89, "grad_norm": 1.7538312831215086, "learning_rate": 7.395751126608842e-05, "loss": 4.1694, "step": 3406 }, { "epoch": 3.89, "grad_norm": 4.792053702622619, "learning_rate": 7.38115310295473e-05, "loss": 4.0679, "step": 3407 }, { "epoch": 3.89, "grad_norm": 2.739059841655377, "learning_rate": 7.366567479370215e-05, "loss": 4.0756, "step": 3408 }, { "epoch": 3.9, "grad_norm": 1.5481163498455444, "learning_rate": 7.351994263851413e-05, "loss": 4.2122, "step": 3409 }, { "epoch": 3.9, "grad_norm": 2.3789177623479203, "learning_rate": 7.337433464387631e-05, "loss": 4.1076, "step": 3410 }, { "epoch": 3.9, "grad_norm": 2.469553958482088, "learning_rate": 7.322885088961372e-05, "loss": 4.187, "step": 3411 }, { "epoch": 3.9, "grad_norm": 3.314921111075954, "learning_rate": 7.3083491455483e-05, "loss": 4.4027, "step": 3412 }, { "epoch": 3.9, "grad_norm": 2.4711899022192307, "learning_rate": 7.293825642117317e-05, "loss": 4.0521, "step": 3413 }, { "epoch": 3.9, "grad_norm": 2.349120990676839, "learning_rate": 7.279314586630462e-05, "loss": 4.1017, "step": 3414 }, { "epoch": 3.9, "grad_norm": 2.1873148832148375, "learning_rate": 7.26481598704298e-05, "loss": 4.1716, "step": 3415 }, { "epoch": 3.9, "grad_norm": 1.481339653012873, "learning_rate": 7.250329851303244e-05, "loss": 4.2414, "step": 3416 }, { "epoch": 3.91, "grad_norm": 1.3175360464850165, "learning_rate": 7.235856187352838e-05, "loss": 4.1332, "step": 3417 }, { "epoch": 3.91, "grad_norm": 0.9848752307300288, "learning_rate": 7.221395003126487e-05, "loss": 4.1389, "step": 3418 }, { "epoch": 3.91, "grad_norm": 1.4332955355371615, "learning_rate": 7.20694630655208e-05, "loss": 4.0235, "step": 3419 }, { "epoch": 3.91, "grad_norm": 1.4860826971494037, "learning_rate": 7.192510105550663e-05, "loss": 4.1448, "step": 3420 }, { "epoch": 3.91, "grad_norm": 1.5502279076412384, "learning_rate": 7.17808640803642e-05, "loss": 3.9278, "step": 3421 }, { "epoch": 3.91, "grad_norm": 2.2184124746945, "learning_rate": 7.163675221916699e-05, "loss": 4.0612, "step": 3422 }, { "epoch": 3.91, "grad_norm": 1.2521161196858128, "learning_rate": 7.149276555091972e-05, "loss": 3.9441, "step": 3423 }, { "epoch": 3.91, "grad_norm": 2.6998207519284954, "learning_rate": 7.13489041545586e-05, "loss": 4.0083, "step": 3424 }, { "epoch": 3.91, "grad_norm": 1.6948036794712216, "learning_rate": 
7.1205168108951e-05, "loss": 4.0673, "step": 3425 }, { "epoch": 3.92, "grad_norm": 1.9083136054388012, "learning_rate": 7.106155749289571e-05, "loss": 4.1857, "step": 3426 }, { "epoch": 3.92, "grad_norm": 1.9336598926323545, "learning_rate": 7.091807238512276e-05, "loss": 4.1656, "step": 3427 }, { "epoch": 3.92, "grad_norm": 2.1693045260384753, "learning_rate": 7.077471286429328e-05, "loss": 4.2116, "step": 3428 }, { "epoch": 3.92, "grad_norm": 1.7180440452043282, "learning_rate": 7.063147900899968e-05, "loss": 4.3377, "step": 3429 }, { "epoch": 3.92, "grad_norm": 1.9922263375765297, "learning_rate": 7.048837089776535e-05, "loss": 4.0492, "step": 3430 }, { "epoch": 3.92, "grad_norm": 5.8110245405092265, "learning_rate": 7.034538860904482e-05, "loss": 4.0022, "step": 3431 }, { "epoch": 3.92, "grad_norm": 1.3078704921577244, "learning_rate": 7.020253222122369e-05, "loss": 4.16, "step": 3432 }, { "epoch": 3.92, "grad_norm": 1.348654826934665, "learning_rate": 7.005980181261836e-05, "loss": 4.1607, "step": 3433 }, { "epoch": 3.92, "grad_norm": 1.5297713544558276, "learning_rate": 6.991719746147628e-05, "loss": 4.1094, "step": 3434 }, { "epoch": 3.93, "grad_norm": 2.5183613811567174, "learning_rate": 6.977471924597585e-05, "loss": 4.2665, "step": 3435 }, { "epoch": 3.93, "grad_norm": 1.380223629748389, "learning_rate": 6.963236724422609e-05, "loss": 3.9095, "step": 3436 }, { "epoch": 3.93, "grad_norm": 4.325580821237642, "learning_rate": 6.949014153426738e-05, "loss": 4.17, "step": 3437 }, { "epoch": 3.93, "grad_norm": 1.5847543043490915, "learning_rate": 6.934804219407011e-05, "loss": 4.3351, "step": 3438 }, { "epoch": 3.93, "grad_norm": 2.8867950722683484, "learning_rate": 6.920606930153592e-05, "loss": 4.0453, "step": 3439 }, { "epoch": 3.93, "grad_norm": 1.757867175262845, "learning_rate": 6.90642229344969e-05, "loss": 4.1525, "step": 3440 }, { "epoch": 3.93, "grad_norm": 1.8266086516935927, "learning_rate": 6.892250317071603e-05, "loss": 4.3103, "step": 3441 }, { "epoch": 3.93, "grad_norm": 3.118193753256394, "learning_rate": 6.878091008788647e-05, "loss": 3.9629, "step": 3442 }, { "epoch": 3.93, "grad_norm": 1.692339092807929, "learning_rate": 6.863944376363222e-05, "loss": 4.0449, "step": 3443 }, { "epoch": 3.94, "grad_norm": 1.151465531504573, "learning_rate": 6.849810427550772e-05, "loss": 4.2318, "step": 3444 }, { "epoch": 3.94, "grad_norm": 1.6242586378911261, "learning_rate": 6.835689170099811e-05, "loss": 4.2257, "step": 3445 }, { "epoch": 3.94, "grad_norm": 3.24438797367478, "learning_rate": 6.821580611751848e-05, "loss": 4.1249, "step": 3446 }, { "epoch": 3.94, "grad_norm": 1.8605871689855538, "learning_rate": 6.80748476024146e-05, "loss": 4.0569, "step": 3447 }, { "epoch": 3.94, "grad_norm": 3.101605645042912, "learning_rate": 6.793401623296257e-05, "loss": 4.1084, "step": 3448 }, { "epoch": 3.94, "grad_norm": 1.3958115241882771, "learning_rate": 6.779331208636883e-05, "loss": 3.9488, "step": 3449 }, { "epoch": 3.94, "grad_norm": 1.739262636989102, "learning_rate": 6.765273523976982e-05, "loss": 4.043, "step": 3450 }, { "epoch": 3.94, "grad_norm": 1.9290841423785303, "learning_rate": 6.751228577023237e-05, "loss": 4.1535, "step": 3451 }, { "epoch": 3.95, "grad_norm": 2.2970291115809727, "learning_rate": 6.737196375475362e-05, "loss": 4.2475, "step": 3452 }, { "epoch": 3.95, "grad_norm": 2.6318991757245245, "learning_rate": 6.723176927026062e-05, "loss": 4.2696, "step": 3453 }, { "epoch": 3.95, "grad_norm": 2.8133433344600167, "learning_rate": 6.709170239361065e-05, "loss": 
4.1249, "step": 3454 }, { "epoch": 3.95, "grad_norm": 2.8220270673672188, "learning_rate": 6.695176320159081e-05, "loss": 4.0947, "step": 3455 }, { "epoch": 3.95, "grad_norm": 1.599630642936928, "learning_rate": 6.681195177091842e-05, "loss": 4.1151, "step": 3456 }, { "epoch": 3.95, "grad_norm": 3.1824872160472784, "learning_rate": 6.667226817824067e-05, "loss": 4.0812, "step": 3457 }, { "epoch": 3.95, "grad_norm": 3.848714515958676, "learning_rate": 6.653271250013473e-05, "loss": 4.1854, "step": 3458 }, { "epoch": 3.95, "grad_norm": 2.559022294045578, "learning_rate": 6.63932848131076e-05, "loss": 3.9643, "step": 3459 }, { "epoch": 3.95, "grad_norm": 1.8088261615900627, "learning_rate": 6.625398519359605e-05, "loss": 3.9422, "step": 3460 }, { "epoch": 3.96, "grad_norm": 1.8519859070854041, "learning_rate": 6.611481371796674e-05, "loss": 4.214, "step": 3461 }, { "epoch": 3.96, "grad_norm": 2.0255100265750485, "learning_rate": 6.597577046251618e-05, "loss": 3.9239, "step": 3462 }, { "epoch": 3.96, "grad_norm": 3.4315138335817554, "learning_rate": 6.583685550347017e-05, "loss": 4.1546, "step": 3463 }, { "epoch": 3.96, "grad_norm": 2.0871922543234698, "learning_rate": 6.569806891698466e-05, "loss": 4.0996, "step": 3464 }, { "epoch": 3.96, "grad_norm": 1.6781911138929437, "learning_rate": 6.555941077914493e-05, "loss": 4.0037, "step": 3465 }, { "epoch": 3.96, "grad_norm": 1.9052747457383474, "learning_rate": 6.542088116596598e-05, "loss": 4.0219, "step": 3466 }, { "epoch": 3.96, "grad_norm": 2.45016693315942, "learning_rate": 6.528248015339225e-05, "loss": 4.2443, "step": 3467 }, { "epoch": 3.96, "grad_norm": 2.292821470969717, "learning_rate": 6.51442078172978e-05, "loss": 4.1301, "step": 3468 }, { "epoch": 3.96, "grad_norm": 2.488655856975399, "learning_rate": 6.500606423348604e-05, "loss": 4.1392, "step": 3469 }, { "epoch": 3.97, "grad_norm": 2.010321010272779, "learning_rate": 6.486804947768984e-05, "loss": 4.2417, "step": 3470 }, { "epoch": 3.97, "grad_norm": 2.393672572345543, "learning_rate": 6.473016362557155e-05, "loss": 3.9808, "step": 3471 }, { "epoch": 3.97, "grad_norm": 2.1518823646805685, "learning_rate": 6.459240675272252e-05, "loss": 4.3066, "step": 3472 }, { "epoch": 3.97, "grad_norm": 2.1963810693386523, "learning_rate": 6.445477893466372e-05, "loss": 4.0091, "step": 3473 }, { "epoch": 3.97, "grad_norm": 1.936092188681743, "learning_rate": 6.431728024684525e-05, "loss": 4.1202, "step": 3474 }, { "epoch": 3.97, "grad_norm": 3.073665176499354, "learning_rate": 6.417991076464648e-05, "loss": 4.207, "step": 3475 }, { "epoch": 3.97, "grad_norm": 2.905600596477906, "learning_rate": 6.404267056337584e-05, "loss": 4.1479, "step": 3476 }, { "epoch": 3.97, "grad_norm": 3.7100267208133517, "learning_rate": 6.390555971827095e-05, "loss": 4.103, "step": 3477 }, { "epoch": 3.97, "grad_norm": 2.2875590363680427, "learning_rate": 6.376857830449854e-05, "loss": 4.2382, "step": 3478 }, { "epoch": 3.98, "grad_norm": 1.9457960919051724, "learning_rate": 6.363172639715441e-05, "loss": 4.2627, "step": 3479 }, { "epoch": 3.98, "grad_norm": 3.1182339816371907, "learning_rate": 6.349500407126312e-05, "loss": 4.1978, "step": 3480 }, { "epoch": 3.98, "grad_norm": 4.991612699971542, "learning_rate": 6.335841140177851e-05, "loss": 4.0617, "step": 3481 }, { "epoch": 3.98, "grad_norm": 2.1690204897626857, "learning_rate": 6.322194846358317e-05, "loss": 3.9048, "step": 3482 }, { "epoch": 3.98, "grad_norm": 1.5472513968828265, "learning_rate": 6.30856153314886e-05, "loss": 3.9185, "step": 3483 }, { "epoch": 
3.98, "grad_norm": 1.898410892349771, "learning_rate": 6.294941208023515e-05, "loss": 4.272, "step": 3484 }, { "epoch": 3.98, "grad_norm": 4.954931111265682, "learning_rate": 6.281333878449191e-05, "loss": 4.2094, "step": 3485 }, { "epoch": 3.98, "grad_norm": 2.3556245215414036, "learning_rate": 6.267739551885687e-05, "loss": 4.2815, "step": 3486 }, { "epoch": 3.99, "grad_norm": 3.0246034456165964, "learning_rate": 6.254158235785668e-05, "loss": 4.0454, "step": 3487 }, { "epoch": 3.99, "grad_norm": 2.1724604598500123, "learning_rate": 6.24058993759464e-05, "loss": 4.1188, "step": 3488 }, { "epoch": 3.99, "grad_norm": 1.3642957031518947, "learning_rate": 6.227034664751009e-05, "loss": 4.1455, "step": 3489 }, { "epoch": 3.99, "grad_norm": 2.2041174720177215, "learning_rate": 6.213492424686029e-05, "loss": 4.1365, "step": 3490 }, { "epoch": 3.99, "grad_norm": 1.807827488810114, "learning_rate": 6.199963224823788e-05, "loss": 4.0472, "step": 3491 }, { "epoch": 3.99, "grad_norm": 2.335851456663086, "learning_rate": 6.186447072581277e-05, "loss": 3.9763, "step": 3492 }, { "epoch": 3.99, "grad_norm": 2.883482320528189, "learning_rate": 6.172943975368269e-05, "loss": 4.1042, "step": 3493 }, { "epoch": 3.99, "grad_norm": 2.2436354701525016, "learning_rate": 6.159453940587424e-05, "loss": 4.1076, "step": 3494 }, { "epoch": 3.99, "grad_norm": 1.5966055793397647, "learning_rate": 6.145976975634228e-05, "loss": 3.9576, "step": 3495 }, { "epoch": 4.0, "grad_norm": 1.6825985345290422, "learning_rate": 6.132513087897007e-05, "loss": 4.108, "step": 3496 }, { "epoch": 4.0, "grad_norm": 3.9566097888547604, "learning_rate": 6.119062284756897e-05, "loss": 4.0415, "step": 3497 }, { "epoch": 4.0, "grad_norm": 2.706239460238766, "learning_rate": 6.105624573587882e-05, "loss": 4.2283, "step": 3498 }, { "epoch": 4.0, "grad_norm": 2.3765801577541357, "learning_rate": 6.092199961756774e-05, "loss": 4.0625, "step": 3499 }, { "epoch": 4.0, "grad_norm": 2.739307432103461, "learning_rate": 6.078788456623197e-05, "loss": 4.0601, "step": 3500 }, { "epoch": 4.0, "eval_blimp_filtered_avg": 0.5240298507462686, "eval_blimp_filtered_std": 0.005630169654235808, "step": 3500 }, { "epoch": 4.0, "eval_blimp_supplement_avg": 0.4504310344827586, "eval_blimp_supplement_std": 0.02258237285959225, "step": 3500 }, { "epoch": 4.0, "eval_vqa_filtered_avg": 0.37, "eval_vqa_filtered_std": 0.048523658709391, "step": 3500 }, { "epoch": 4.0, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 3500 }, { "epoch": 4.0, "eval_blimp_filtered_avg": 0.5240298507462686, "eval_blimp_filtered_std": 0.005630169654235808, "step": 3500 }, { "epoch": 4.0, "eval_blimp_supplement_avg": 0.4504310344827586, "eval_blimp_supplement_std": 0.02258237285959225, "step": 3500 }, { "epoch": 4.0, "eval_vqa_filtered_avg": 0.37, "eval_vqa_filtered_std": 0.048523658709391, "step": 3500 }, { "epoch": 4.0, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 3500 }, { "epoch": 4.0, "grad_norm": 1.7075372915311746, "learning_rate": 6.065390065539566e-05, "loss": 3.9221, "step": 3501 }, { "epoch": 4.0, "grad_norm": 2.598589349431351, "learning_rate": 6.052004795851135e-05, "loss": 4.113, "step": 3502 }, { "epoch": 4.0, "grad_norm": 2.820915136060971, "learning_rate": 6.0386326548959586e-05, "loss": 4.2593, "step": 3503 }, { "epoch": 4.0, "grad_norm": 3.191741811289664, "learning_rate": 6.025273650004898e-05, "loss": 4.0402, "step": 3504 }, { "epoch": 4.01, "grad_norm": 
1.4727093427177709, "learning_rate": 6.0119277885015825e-05, "loss": 4.1636, "step": 3505 }, { "epoch": 4.01, "grad_norm": 2.2456756520920482, "learning_rate": 5.998595077702483e-05, "loss": 4.2336, "step": 3506 }, { "epoch": 4.01, "grad_norm": 2.2095242615007376, "learning_rate": 5.9852755249168315e-05, "loss": 3.8525, "step": 3507 }, { "epoch": 4.01, "grad_norm": 2.468290611430893, "learning_rate": 5.9719691374466516e-05, "loss": 4.1854, "step": 3508 }, { "epoch": 4.01, "grad_norm": 3.040043344784734, "learning_rate": 5.958675922586761e-05, "loss": 4.2067, "step": 3509 }, { "epoch": 4.01, "grad_norm": 2.6184799625210005, "learning_rate": 5.945395887624723e-05, "loss": 3.9965, "step": 3510 }, { "epoch": 4.01, "grad_norm": 2.80929383019444, "learning_rate": 5.9321290398409147e-05, "loss": 4.144, "step": 3511 }, { "epoch": 4.01, "grad_norm": 2.3100933172494087, "learning_rate": 5.918875386508462e-05, "loss": 4.1339, "step": 3512 }, { "epoch": 4.01, "grad_norm": 2.3247771387771095, "learning_rate": 5.905634934893264e-05, "loss": 4.2282, "step": 3513 }, { "epoch": 4.02, "grad_norm": 2.1392157259734397, "learning_rate": 5.89240769225398e-05, "loss": 4.3983, "step": 3514 }, { "epoch": 4.02, "grad_norm": 1.5940489516236678, "learning_rate": 5.879193665842028e-05, "loss": 4.0937, "step": 3515 }, { "epoch": 4.02, "grad_norm": 2.4128766487299433, "learning_rate": 5.865992862901581e-05, "loss": 4.0056, "step": 3516 }, { "epoch": 4.02, "grad_norm": 2.378453104810622, "learning_rate": 5.8528052906695754e-05, "loss": 4.0268, "step": 3517 }, { "epoch": 4.02, "grad_norm": 1.734870061248331, "learning_rate": 5.8396309563756595e-05, "loss": 4.0503, "step": 3518 }, { "epoch": 4.02, "grad_norm": 2.22939070117633, "learning_rate": 5.8264698672422613e-05, "loss": 4.0084, "step": 3519 }, { "epoch": 4.02, "grad_norm": 2.6419235359217965, "learning_rate": 5.8133220304845276e-05, "loss": 4.0112, "step": 3520 }, { "epoch": 4.02, "grad_norm": 2.420327913540527, "learning_rate": 5.8001874533103475e-05, "loss": 4.2076, "step": 3521 }, { "epoch": 4.03, "grad_norm": 4.4122761669434185, "learning_rate": 5.787066142920339e-05, "loss": 4.1702, "step": 3522 }, { "epoch": 4.03, "grad_norm": 3.6482888068528774, "learning_rate": 5.773958106507844e-05, "loss": 4.0976, "step": 3523 }, { "epoch": 4.03, "grad_norm": 2.4409284668029243, "learning_rate": 5.7608633512589353e-05, "loss": 4.3172, "step": 3524 }, { "epoch": 4.03, "grad_norm": 1.9424071671925356, "learning_rate": 5.747781884352396e-05, "loss": 4.0566, "step": 3525 }, { "epoch": 4.03, "grad_norm": 1.98026865969247, "learning_rate": 5.734713712959736e-05, "loss": 4.2095, "step": 3526 }, { "epoch": 4.03, "grad_norm": 2.878657245771692, "learning_rate": 5.721658844245153e-05, "loss": 4.1011, "step": 3527 }, { "epoch": 4.03, "grad_norm": 3.230259443936672, "learning_rate": 5.7086172853655734e-05, "loss": 4.1763, "step": 3528 }, { "epoch": 4.03, "grad_norm": 2.7201923801798156, "learning_rate": 5.6955890434706254e-05, "loss": 3.9993, "step": 3529 }, { "epoch": 4.03, "grad_norm": 3.067605526787616, "learning_rate": 5.682574125702626e-05, "loss": 4.0871, "step": 3530 }, { "epoch": 4.04, "grad_norm": 12.791950912037768, "learning_rate": 5.6695725391965975e-05, "loss": 4.1648, "step": 3531 }, { "epoch": 4.04, "grad_norm": 2.5082280405687865, "learning_rate": 5.656584291080248e-05, "loss": 4.0296, "step": 3532 }, { "epoch": 4.04, "grad_norm": 2.1402752935860803, "learning_rate": 5.643609388473973e-05, "loss": 4.1179, "step": 3533 }, { "epoch": 4.04, "grad_norm": 2.522998470014395, 
"learning_rate": 5.6306478384908685e-05, "loss": 4.1138, "step": 3534 }, { "epoch": 4.04, "grad_norm": 2.5947858296007853, "learning_rate": 5.61769964823667e-05, "loss": 4.0997, "step": 3535 }, { "epoch": 4.04, "grad_norm": 5.321972315426501, "learning_rate": 5.6047648248098295e-05, "loss": 4.0776, "step": 3536 }, { "epoch": 4.04, "grad_norm": 5.344490044865733, "learning_rate": 5.591843375301456e-05, "loss": 4.1193, "step": 3537 }, { "epoch": 4.04, "grad_norm": 3.444675384158124, "learning_rate": 5.578935306795325e-05, "loss": 4.275, "step": 3538 }, { "epoch": 4.04, "grad_norm": 2.848810277902967, "learning_rate": 5.566040626367876e-05, "loss": 4.0741, "step": 3539 }, { "epoch": 4.05, "grad_norm": 4.010707911723712, "learning_rate": 5.553159341088211e-05, "loss": 4.0755, "step": 3540 }, { "epoch": 4.05, "grad_norm": 5.028743821661657, "learning_rate": 5.540291458018096e-05, "loss": 4.1993, "step": 3541 }, { "epoch": 4.05, "grad_norm": 5.068107858066767, "learning_rate": 5.5274369842119325e-05, "loss": 3.9684, "step": 3542 }, { "epoch": 4.05, "grad_norm": 2.3737456782388175, "learning_rate": 5.514595926716795e-05, "loss": 4.0375, "step": 3543 }, { "epoch": 4.05, "grad_norm": 3.317066957801331, "learning_rate": 5.501768292572368e-05, "loss": 4.0848, "step": 3544 }, { "epoch": 4.05, "grad_norm": 4.001136557476432, "learning_rate": 5.4889540888110006e-05, "loss": 4.2648, "step": 3545 }, { "epoch": 4.05, "grad_norm": 5.015089213842579, "learning_rate": 5.476153322457689e-05, "loss": 4.3071, "step": 3546 }, { "epoch": 4.05, "grad_norm": 4.332737110312137, "learning_rate": 5.463366000530054e-05, "loss": 4.1417, "step": 3547 }, { "epoch": 4.05, "grad_norm": 2.142962660200713, "learning_rate": 5.4505921300383206e-05, "loss": 4.0741, "step": 3548 }, { "epoch": 4.06, "grad_norm": 4.579399725138693, "learning_rate": 5.437831717985374e-05, "loss": 4.1862, "step": 3549 }, { "epoch": 4.06, "grad_norm": 3.606865827222683, "learning_rate": 5.4250847713667014e-05, "loss": 4.0912, "step": 3550 }, { "epoch": 4.06, "grad_norm": 2.3965773942002677, "learning_rate": 5.412351297170426e-05, "loss": 4.0709, "step": 3551 }, { "epoch": 4.06, "grad_norm": 5.31315330392113, "learning_rate": 5.399631302377252e-05, "loss": 4.3931, "step": 3552 }, { "epoch": 4.06, "grad_norm": 9.79507585167842, "learning_rate": 5.386924793960533e-05, "loss": 4.1638, "step": 3553 }, { "epoch": 4.06, "grad_norm": 1.9000302223317878, "learning_rate": 5.374231778886211e-05, "loss": 4.1287, "step": 3554 }, { "epoch": 4.06, "grad_norm": 3.090100560824642, "learning_rate": 5.361552264112833e-05, "loss": 4.1297, "step": 3555 }, { "epoch": 4.06, "grad_norm": 3.462344971719606, "learning_rate": 5.3488862565915314e-05, "loss": 4.0514, "step": 3556 }, { "epoch": 4.07, "grad_norm": 2.529639624401169, "learning_rate": 5.3362337632660526e-05, "loss": 4.2577, "step": 3557 }, { "epoch": 4.07, "grad_norm": 2.268078829755966, "learning_rate": 5.323594791072724e-05, "loss": 4.175, "step": 3558 }, { "epoch": 4.07, "grad_norm": 4.416049110535458, "learning_rate": 5.310969346940467e-05, "loss": 4.1469, "step": 3559 }, { "epoch": 4.07, "grad_norm": 1.6177420254825572, "learning_rate": 5.298357437790781e-05, "loss": 4.1866, "step": 3560 }, { "epoch": 4.07, "grad_norm": 3.3316744867948227, "learning_rate": 5.2857590705377495e-05, "loss": 4.2168, "step": 3561 }, { "epoch": 4.07, "grad_norm": 14.004467923721895, "learning_rate": 5.2731742520880306e-05, "loss": 4.061, "step": 3562 }, { "epoch": 4.07, "grad_norm": 2.905782346746633, "learning_rate": 
5.2606029893408486e-05, "loss": 4.1482, "step": 3563 }, { "epoch": 4.07, "grad_norm": 3.4514697186083922, "learning_rate": 5.248045289188017e-05, "loss": 4.4161, "step": 3564 }, { "epoch": 4.07, "grad_norm": 5.935484268848983, "learning_rate": 5.235501158513881e-05, "loss": 4.4268, "step": 3565 }, { "epoch": 4.08, "grad_norm": 4.275284440554483, "learning_rate": 5.22297060419537e-05, "loss": 4.3941, "step": 3566 }, { "epoch": 4.08, "grad_norm": 9.082787689708622, "learning_rate": 5.210453633101968e-05, "loss": 4.1896, "step": 3567 }, { "epoch": 4.08, "grad_norm": 5.867315258788226, "learning_rate": 5.197950252095711e-05, "loss": 4.3694, "step": 3568 }, { "epoch": 4.08, "grad_norm": 4.671059362352427, "learning_rate": 5.1854604680311765e-05, "loss": 3.989, "step": 3569 }, { "epoch": 4.08, "grad_norm": 3.247118131856371, "learning_rate": 5.1729842877555036e-05, "loss": 3.9313, "step": 3570 }, { "epoch": 4.08, "grad_norm": 2.9378153984924147, "learning_rate": 5.1605217181083594e-05, "loss": 4.0907, "step": 3571 }, { "epoch": 4.08, "grad_norm": 3.081252416293258, "learning_rate": 5.1480727659219604e-05, "loss": 4.1651, "step": 3572 }, { "epoch": 4.08, "grad_norm": 3.4445595433517697, "learning_rate": 5.135637438021044e-05, "loss": 4.0493, "step": 3573 }, { "epoch": 4.08, "grad_norm": 3.0666528845879157, "learning_rate": 5.123215741222888e-05, "loss": 4.2087, "step": 3574 }, { "epoch": 4.09, "grad_norm": 3.5851993840612653, "learning_rate": 5.1108076823373014e-05, "loss": 4.2625, "step": 3575 }, { "epoch": 4.09, "grad_norm": 4.3938222369688305, "learning_rate": 5.0984132681666054e-05, "loss": 4.1969, "step": 3576 }, { "epoch": 4.09, "grad_norm": 3.0662066044007963, "learning_rate": 5.08603250550565e-05, "loss": 4.0751, "step": 3577 }, { "epoch": 4.09, "grad_norm": 3.3923288014385773, "learning_rate": 5.073665401141796e-05, "loss": 4.0649, "step": 3578 }, { "epoch": 4.09, "grad_norm": 2.3796999722476486, "learning_rate": 5.0613119618549235e-05, "loss": 4.1409, "step": 3579 }, { "epoch": 4.09, "grad_norm": 2.490782452264804, "learning_rate": 5.04897219441741e-05, "loss": 4.2024, "step": 3580 }, { "epoch": 4.09, "grad_norm": 3.7546327372179626, "learning_rate": 5.0366461055941545e-05, "loss": 4.2641, "step": 3581 }, { "epoch": 4.09, "grad_norm": 4.282670696850256, "learning_rate": 5.0243337021425314e-05, "loss": 4.1234, "step": 3582 }, { "epoch": 4.09, "grad_norm": 5.554601141617358, "learning_rate": 5.0120349908124314e-05, "loss": 3.9754, "step": 3583 }, { "epoch": 4.1, "grad_norm": 4.407669682737058, "learning_rate": 4.9997499783462346e-05, "loss": 4.2378, "step": 3584 }, { "epoch": 4.1, "grad_norm": 27.888943282567677, "learning_rate": 4.987478671478831e-05, "loss": 4.0176, "step": 3585 }, { "epoch": 4.1, "grad_norm": 2.396506910788156, "learning_rate": 4.975221076937551e-05, "loss": 4.2301, "step": 3586 }, { "epoch": 4.1, "grad_norm": 2.8792619358918006, "learning_rate": 4.962977201442248e-05, "loss": 4.1278, "step": 3587 }, { "epoch": 4.1, "grad_norm": 5.883881528022609, "learning_rate": 4.9507470517052375e-05, "loss": 4.2444, "step": 3588 }, { "epoch": 4.1, "grad_norm": 4.2078105581336, "learning_rate": 4.93853063443132e-05, "loss": 4.2882, "step": 3589 }, { "epoch": 4.1, "grad_norm": 3.3397891771736252, "learning_rate": 4.926327956317749e-05, "loss": 4.2494, "step": 3590 }, { "epoch": 4.1, "grad_norm": 3.3646523372931747, "learning_rate": 4.914139024054267e-05, "loss": 4.4453, "step": 3591 }, { "epoch": 4.11, "grad_norm": 3.1083894987660416, "learning_rate": 4.901963844323057e-05, "loss": 
4.4633, "step": 3592 }, { "epoch": 4.11, "grad_norm": 2.1172080040575127, "learning_rate": 4.889802423798807e-05, "loss": 4.1917, "step": 3593 }, { "epoch": 4.11, "grad_norm": 2.6615660917725013, "learning_rate": 4.877654769148606e-05, "loss": 4.3003, "step": 3594 }, { "epoch": 4.11, "grad_norm": 2.5523401723525385, "learning_rate": 4.865520887032034e-05, "loss": 4.2777, "step": 3595 }, { "epoch": 4.11, "grad_norm": 12.185781555777368, "learning_rate": 4.8534007841011043e-05, "loss": 4.2261, "step": 3596 }, { "epoch": 4.11, "grad_norm": 3.22281287487506, "learning_rate": 4.841294467000283e-05, "loss": 4.1856, "step": 3597 }, { "epoch": 4.11, "grad_norm": 3.4459971142781867, "learning_rate": 4.829201942366483e-05, "loss": 4.108, "step": 3598 }, { "epoch": 4.11, "grad_norm": 2.720613178267878, "learning_rate": 4.817123216829032e-05, "loss": 4.2964, "step": 3599 }, { "epoch": 4.11, "grad_norm": 2.846198699750679, "learning_rate": 4.8050582970097295e-05, "loss": 4.2912, "step": 3600 }, { "epoch": 4.11, "eval_blimp_filtered_avg": 0.524179104477612, "eval_blimp_filtered_std": 0.005679808762812842, "step": 3600 }, { "epoch": 4.11, "eval_blimp_supplement_avg": 0.45905172413793105, "eval_blimp_supplement_std": 0.022908982054118343, "step": 3600 }, { "epoch": 4.11, "eval_vqa_filtered_avg": 0.37, "eval_vqa_filtered_std": 0.048523658709391, "step": 3600 }, { "epoch": 4.11, "eval_winoground_filtered_avg": 0.48, "eval_winoground_filtered_std": 0.05021167315686779, "step": 3600 }, { "epoch": 4.12, "grad_norm": 5.131782034283068, "learning_rate": 4.7930071895227785e-05, "loss": 4.3504, "step": 3601 }, { "epoch": 4.12, "grad_norm": 5.581362767895976, "learning_rate": 4.78096990097483e-05, "loss": 4.174, "step": 3602 }, { "epoch": 4.12, "grad_norm": 6.110857285708107, "learning_rate": 4.76894643796493e-05, "loss": 3.9011, "step": 3603 }, { "epoch": 4.12, "grad_norm": 2.5832484369995896, "learning_rate": 4.7569368070845685e-05, "loss": 4.1102, "step": 3604 }, { "epoch": 4.12, "grad_norm": 8.0145152780317, "learning_rate": 4.7449410149176516e-05, "loss": 4.0851, "step": 3605 }, { "epoch": 4.12, "grad_norm": 3.8413681816037544, "learning_rate": 4.732959068040499e-05, "loss": 4.132, "step": 3606 }, { "epoch": 4.12, "grad_norm": 3.3359890694387717, "learning_rate": 4.720990973021814e-05, "loss": 4.1289, "step": 3607 }, { "epoch": 4.12, "grad_norm": 3.6610589894518455, "learning_rate": 4.709036736422751e-05, "loss": 4.1985, "step": 3608 }, { "epoch": 4.12, "grad_norm": 3.572399539195982, "learning_rate": 4.6970963647968306e-05, "loss": 4.3221, "step": 3609 }, { "epoch": 4.13, "grad_norm": 6.618275238979667, "learning_rate": 4.685169864689997e-05, "loss": 4.0706, "step": 3610 }, { "epoch": 4.13, "grad_norm": 2.397455503599506, "learning_rate": 4.673257242640563e-05, "loss": 4.0416, "step": 3611 }, { "epoch": 4.13, "grad_norm": 2.8466327292153877, "learning_rate": 4.6613585051792523e-05, "loss": 4.0198, "step": 3612 }, { "epoch": 4.13, "grad_norm": 2.5325309387337795, "learning_rate": 4.649473658829176e-05, "loss": 4.0148, "step": 3613 }, { "epoch": 4.13, "grad_norm": 2.949252771556712, "learning_rate": 4.637602710105825e-05, "loss": 4.1447, "step": 3614 }, { "epoch": 4.13, "grad_norm": 3.881652909076067, "learning_rate": 4.6257456655170723e-05, "loss": 4.0515, "step": 3615 }, { "epoch": 4.13, "grad_norm": 2.984135481559177, "learning_rate": 4.6139025315631664e-05, "loss": 4.1306, "step": 3616 }, { "epoch": 4.13, "grad_norm": 3.8769972117637828, "learning_rate": 4.602073314736737e-05, "loss": 4.3417, "step": 3617 
}, { "epoch": 4.13, "grad_norm": 3.2123672206147575, "learning_rate": 4.590258021522778e-05, "loss": 4.0543, "step": 3618 }, { "epoch": 4.14, "grad_norm": 3.2556661805071196, "learning_rate": 4.5784566583986536e-05, "loss": 4.025, "step": 3619 }, { "epoch": 4.14, "grad_norm": 2.8773568905651734, "learning_rate": 4.5666692318340825e-05, "loss": 4.1821, "step": 3620 }, { "epoch": 4.14, "grad_norm": 3.3582550092351373, "learning_rate": 4.5548957482911484e-05, "loss": 4.2361, "step": 3621 }, { "epoch": 4.14, "grad_norm": 3.2113267285914255, "learning_rate": 4.543136214224299e-05, "loss": 3.9839, "step": 3622 }, { "epoch": 4.14, "grad_norm": 4.594280805041271, "learning_rate": 4.531390636080326e-05, "loss": 3.9496, "step": 3623 }, { "epoch": 4.14, "grad_norm": 4.298898080151559, "learning_rate": 4.519659020298371e-05, "loss": 4.1084, "step": 3624 }, { "epoch": 4.14, "grad_norm": 5.881376028009944, "learning_rate": 4.507941373309927e-05, "loss": 4.1397, "step": 3625 }, { "epoch": 4.14, "grad_norm": 4.368712011424131, "learning_rate": 4.496237701538816e-05, "loss": 4.1113, "step": 3626 }, { "epoch": 4.15, "grad_norm": 7.438650643518618, "learning_rate": 4.48454801140122e-05, "loss": 4.1301, "step": 3627 }, { "epoch": 4.15, "grad_norm": 3.721389973320724, "learning_rate": 4.472872309305632e-05, "loss": 4.1596, "step": 3628 }, { "epoch": 4.15, "grad_norm": 4.77930486982069, "learning_rate": 4.461210601652885e-05, "loss": 4.1862, "step": 3629 }, { "epoch": 4.15, "grad_norm": 7.783998153597915, "learning_rate": 4.449562894836152e-05, "loss": 4.0013, "step": 3630 }, { "epoch": 4.15, "grad_norm": 4.382224339566421, "learning_rate": 4.4379291952409076e-05, "loss": 4.1968, "step": 3631 }, { "epoch": 4.15, "grad_norm": 3.709805678169295, "learning_rate": 4.426309509244985e-05, "loss": 4.3482, "step": 3632 }, { "epoch": 4.15, "grad_norm": 5.232961325614194, "learning_rate": 4.4147038432184914e-05, "loss": 4.2531, "step": 3633 }, { "epoch": 4.15, "grad_norm": 7.864860839854472, "learning_rate": 4.4031122035238664e-05, "loss": 4.2552, "step": 3634 }, { "epoch": 4.15, "grad_norm": 4.732335112526321, "learning_rate": 4.391534596515871e-05, "loss": 4.125, "step": 3635 }, { "epoch": 4.16, "grad_norm": 3.686163598572516, "learning_rate": 4.379971028541565e-05, "loss": 4.1646, "step": 3636 }, { "epoch": 4.16, "grad_norm": 2.7408830581682406, "learning_rate": 4.368421505940293e-05, "loss": 4.1597, "step": 3637 }, { "epoch": 4.16, "grad_norm": 8.371716413049333, "learning_rate": 4.3568860350437276e-05, "loss": 4.1837, "step": 3638 }, { "epoch": 4.16, "grad_norm": 5.308445809471748, "learning_rate": 4.345364622175823e-05, "loss": 4.0191, "step": 3639 }, { "epoch": 4.16, "grad_norm": 6.182580288963607, "learning_rate": 4.333857273652844e-05, "loss": 4.2567, "step": 3640 }, { "epoch": 4.16, "grad_norm": 8.454844929772934, "learning_rate": 4.3223639957833175e-05, "loss": 4.1818, "step": 3641 }, { "epoch": 4.16, "grad_norm": 3.2326631131096333, "learning_rate": 4.31088479486807e-05, "loss": 4.4526, "step": 3642 }, { "epoch": 4.16, "grad_norm": 5.6622054249693035, "learning_rate": 4.299419677200221e-05, "loss": 4.1159, "step": 3643 }, { "epoch": 4.16, "grad_norm": 6.582481844127717, "learning_rate": 4.287968649065159e-05, "loss": 3.8922, "step": 3644 }, { "epoch": 4.17, "grad_norm": 6.120517525425776, "learning_rate": 4.276531716740539e-05, "loss": 4.1076, "step": 3645 }, { "epoch": 4.17, "grad_norm": 3.354250745850937, "learning_rate": 4.265108886496299e-05, "loss": 3.9887, "step": 3646 }, { "epoch": 4.17, "grad_norm": 
6.343302935636311, "learning_rate": 4.2537001645946566e-05, "loss": 4.2847, "step": 3647 }, { "epoch": 4.17, "grad_norm": 5.704364224084587, "learning_rate": 4.242305557290076e-05, "loss": 4.0646, "step": 3648 }, { "epoch": 4.17, "grad_norm": 6.401201939345437, "learning_rate": 4.230925070829302e-05, "loss": 4.1448, "step": 3649 }, { "epoch": 4.17, "grad_norm": 3.1569911886799606, "learning_rate": 4.219558711451312e-05, "loss": 4.0598, "step": 3650 }, { "epoch": 4.17, "grad_norm": 5.37262696430018, "learning_rate": 4.208206485387364e-05, "loss": 3.9892, "step": 3651 }, { "epoch": 4.17, "grad_norm": 8.92468002418296, "learning_rate": 4.196868398860951e-05, "loss": 4.2384, "step": 3652 }, { "epoch": 4.17, "grad_norm": 9.768349531865256, "learning_rate": 4.1855444580878295e-05, "loss": 3.9139, "step": 3653 }, { "epoch": 4.18, "grad_norm": 2.4203135655101966, "learning_rate": 4.174234669275984e-05, "loss": 4.064, "step": 3654 }, { "epoch": 4.18, "grad_norm": 5.488266848769192, "learning_rate": 4.162939038625659e-05, "loss": 3.9972, "step": 3655 }, { "epoch": 4.18, "grad_norm": 4.78295241978529, "learning_rate": 4.151657572329323e-05, "loss": 4.0129, "step": 3656 }, { "epoch": 4.18, "grad_norm": 5.263118229560215, "learning_rate": 4.14039027657169e-05, "loss": 4.2694, "step": 3657 }, { "epoch": 4.18, "grad_norm": 2.205895123491167, "learning_rate": 4.1291371575296887e-05, "loss": 4.2937, "step": 3658 }, { "epoch": 4.18, "grad_norm": 2.63810083641438, "learning_rate": 4.1178982213724896e-05, "loss": 4.4265, "step": 3659 }, { "epoch": 4.18, "grad_norm": 3.2736196137722655, "learning_rate": 4.106673474261489e-05, "loss": 4.3539, "step": 3660 }, { "epoch": 4.18, "grad_norm": 3.1748114105906446, "learning_rate": 4.095462922350301e-05, "loss": 4.1853, "step": 3661 }, { "epoch": 4.19, "grad_norm": 2.8014063567239433, "learning_rate": 4.084266571784755e-05, "loss": 3.9337, "step": 3662 }, { "epoch": 4.19, "grad_norm": 3.732077700571058, "learning_rate": 4.073084428702899e-05, "loss": 4.1371, "step": 3663 }, { "epoch": 4.19, "grad_norm": 3.8921418110020287, "learning_rate": 4.061916499234986e-05, "loss": 4.1039, "step": 3664 }, { "epoch": 4.19, "grad_norm": 4.319331091223462, "learning_rate": 4.050762789503488e-05, "loss": 4.1437, "step": 3665 }, { "epoch": 4.19, "grad_norm": 2.4677873087719195, "learning_rate": 4.0396233056230765e-05, "loss": 4.1445, "step": 3666 }, { "epoch": 4.19, "grad_norm": 3.6364411472694735, "learning_rate": 4.028498053700607e-05, "loss": 4.3646, "step": 3667 }, { "epoch": 4.19, "grad_norm": 2.4230227854747644, "learning_rate": 4.0173870398351616e-05, "loss": 4.1127, "step": 3668 }, { "epoch": 4.19, "grad_norm": 4.083442276206653, "learning_rate": 4.006290270117996e-05, "loss": 4.2346, "step": 3669 }, { "epoch": 4.19, "grad_norm": 3.76395978151018, "learning_rate": 3.99520775063257e-05, "loss": 4.1627, "step": 3670 }, { "epoch": 4.2, "grad_norm": 6.133603414028325, "learning_rate": 3.984139487454524e-05, "loss": 4.3092, "step": 3671 }, { "epoch": 4.2, "grad_norm": 2.7708459921013695, "learning_rate": 3.973085486651682e-05, "loss": 4.1053, "step": 3672 }, { "epoch": 4.2, "grad_norm": 2.0740382253985166, "learning_rate": 3.9620457542840546e-05, "loss": 4.0934, "step": 3673 }, { "epoch": 4.2, "grad_norm": 4.445493466660575, "learning_rate": 3.9510202964038316e-05, "loss": 4.0642, "step": 3674 }, { "epoch": 4.2, "grad_norm": 6.326323298806858, "learning_rate": 3.94000911905536e-05, "loss": 4.0063, "step": 3675 }, { "epoch": 4.2, "grad_norm": 5.6882323317049055, "learning_rate": 
3.929012228275176e-05, "loss": 4.355, "step": 3676 }, { "epoch": 4.2, "grad_norm": 2.943847960274427, "learning_rate": 3.9180296300919775e-05, "loss": 4.0447, "step": 3677 }, { "epoch": 4.2, "grad_norm": 4.275925193578393, "learning_rate": 3.907061330526633e-05, "loss": 4.1654, "step": 3678 }, { "epoch": 4.2, "grad_norm": 5.155874484271656, "learning_rate": 3.896107335592161e-05, "loss": 4.1328, "step": 3679 }, { "epoch": 4.21, "grad_norm": 5.628279075093127, "learning_rate": 3.885167651293746e-05, "loss": 4.1944, "step": 3680 }, { "epoch": 4.21, "grad_norm": 5.205842245851854, "learning_rate": 3.8742422836287254e-05, "loss": 3.9919, "step": 3681 }, { "epoch": 4.21, "grad_norm": 3.568618505820273, "learning_rate": 3.8633312385865935e-05, "loss": 3.9594, "step": 3682 }, { "epoch": 4.21, "grad_norm": 3.730447177801977, "learning_rate": 3.852434522148974e-05, "loss": 4.0065, "step": 3683 }, { "epoch": 4.21, "grad_norm": 5.287833821358675, "learning_rate": 3.841552140289656e-05, "loss": 4.1788, "step": 3684 }, { "epoch": 4.21, "grad_norm": 2.863253284778557, "learning_rate": 3.830684098974556e-05, "loss": 4.0146, "step": 3685 }, { "epoch": 4.21, "grad_norm": 3.540012932846599, "learning_rate": 3.8198304041617386e-05, "loss": 4.0053, "step": 3686 }, { "epoch": 4.21, "grad_norm": 5.922100358543876, "learning_rate": 3.808991061801412e-05, "loss": 4.0111, "step": 3687 }, { "epoch": 4.21, "grad_norm": 3.7399586712416495, "learning_rate": 3.798166077835885e-05, "loss": 4.0448, "step": 3688 }, { "epoch": 4.22, "grad_norm": 3.604636442640428, "learning_rate": 3.78735545819962e-05, "loss": 3.9785, "step": 3689 }, { "epoch": 4.22, "grad_norm": 3.8329153142536105, "learning_rate": 3.776559208819204e-05, "loss": 4.1237, "step": 3690 }, { "epoch": 4.22, "grad_norm": 3.82260847405284, "learning_rate": 3.765777335613339e-05, "loss": 3.923, "step": 3691 }, { "epoch": 4.22, "grad_norm": 4.560116597937605, "learning_rate": 3.75500984449284e-05, "loss": 4.1643, "step": 3692 }, { "epoch": 4.22, "grad_norm": 5.250877080088921, "learning_rate": 3.744256741360638e-05, "loss": 3.9902, "step": 3693 }, { "epoch": 4.22, "grad_norm": 5.443282896168814, "learning_rate": 3.733518032111805e-05, "loss": 4.0529, "step": 3694 }, { "epoch": 4.22, "grad_norm": 3.1403446913079986, "learning_rate": 3.722793722633491e-05, "loss": 4.3579, "step": 3695 }, { "epoch": 4.22, "grad_norm": 3.9121579359221776, "learning_rate": 3.712083818804951e-05, "loss": 4.1671, "step": 3696 }, { "epoch": 4.23, "grad_norm": 5.068376304786552, "learning_rate": 3.70138832649756e-05, "loss": 3.9384, "step": 3697 }, { "epoch": 4.23, "grad_norm": 5.419705714112912, "learning_rate": 3.690707251574781e-05, "loss": 3.9901, "step": 3698 }, { "epoch": 4.23, "grad_norm": 2.984841634948577, "learning_rate": 3.680040599892189e-05, "loss": 4.0834, "step": 3699 }, { "epoch": 4.23, "grad_norm": 3.2984953665908674, "learning_rate": 3.6693883772974144e-05, "loss": 4.1972, "step": 3700 }, { "epoch": 4.23, "eval_blimp_filtered_avg": 0.5216417910447761, "eval_blimp_filtered_std": 0.005742395525976493, "step": 3700 }, { "epoch": 4.23, "eval_blimp_supplement_avg": 0.4676724137931034, "eval_blimp_supplement_std": 0.02280067506813048, "step": 3700 }, { "epoch": 4.23, "eval_vqa_filtered_avg": 0.36, "eval_vqa_filtered_std": 0.04824181513244218, "step": 3700 }, { "epoch": 4.23, "eval_winoground_filtered_avg": 0.48, "eval_winoground_filtered_std": 0.05021167315686779, "step": 3700 }, { "epoch": 4.23, "grad_norm": 2.7526079330224382, "learning_rate": 3.6587505896302227e-05, "loss": 
4.008, "step": 3701 }, { "epoch": 4.23, "grad_norm": 4.090880567337932, "learning_rate": 3.6481272427224393e-05, "loss": 4.0863, "step": 3702 }, { "epoch": 4.23, "grad_norm": 4.035055688477091, "learning_rate": 3.637518342397979e-05, "loss": 4.0816, "step": 3703 }, { "epoch": 4.23, "grad_norm": 4.21178928416299, "learning_rate": 3.6269238944728466e-05, "loss": 4.1727, "step": 3704 }, { "epoch": 4.23, "grad_norm": 6.540815260444656, "learning_rate": 3.6163439047550946e-05, "loss": 3.9811, "step": 3705 }, { "epoch": 4.24, "grad_norm": 2.628634926556877, "learning_rate": 3.6057783790448814e-05, "loss": 4.1279, "step": 3706 }, { "epoch": 4.24, "grad_norm": 2.4956472283533713, "learning_rate": 3.595227323134423e-05, "loss": 3.9846, "step": 3707 }, { "epoch": 4.24, "grad_norm": 4.577987377296766, "learning_rate": 3.584690742808e-05, "loss": 4.0898, "step": 3708 }, { "epoch": 4.24, "grad_norm": 3.8111908989259886, "learning_rate": 3.5741686438419604e-05, "loss": 4.1382, "step": 3709 }, { "epoch": 4.24, "grad_norm": 5.582132003604005, "learning_rate": 3.563661032004714e-05, "loss": 3.978, "step": 3710 }, { "epoch": 4.24, "grad_norm": 3.4143706995306538, "learning_rate": 3.55316791305673e-05, "loss": 4.0962, "step": 3711 }, { "epoch": 4.24, "grad_norm": 3.9935429504166398, "learning_rate": 3.5426892927505335e-05, "loss": 4.068, "step": 3712 }, { "epoch": 4.24, "grad_norm": 3.0170125351571455, "learning_rate": 3.532225176830685e-05, "loss": 4.1724, "step": 3713 }, { "epoch": 4.24, "grad_norm": 4.967612913607452, "learning_rate": 3.521775571033817e-05, "loss": 4.1945, "step": 3714 }, { "epoch": 4.25, "grad_norm": 4.635174250852855, "learning_rate": 3.511340481088589e-05, "loss": 4.136, "step": 3715 }, { "epoch": 4.25, "grad_norm": 2.6502685813190148, "learning_rate": 3.500919912715716e-05, "loss": 4.1644, "step": 3716 }, { "epoch": 4.25, "grad_norm": 3.3716035684295087, "learning_rate": 3.4905138716279415e-05, "loss": 4.2265, "step": 3717 }, { "epoch": 4.25, "grad_norm": 3.2420457368299807, "learning_rate": 3.4801223635300546e-05, "loss": 4.3306, "step": 3718 }, { "epoch": 4.25, "grad_norm": 2.907021711673663, "learning_rate": 3.469745394118866e-05, "loss": 4.0928, "step": 3719 }, { "epoch": 4.25, "grad_norm": 5.26213464272231, "learning_rate": 3.459382969083227e-05, "loss": 3.9577, "step": 3720 }, { "epoch": 4.25, "grad_norm": 4.495568705430152, "learning_rate": 3.449035094104012e-05, "loss": 4.1986, "step": 3721 }, { "epoch": 4.25, "grad_norm": 4.58724297607965, "learning_rate": 3.438701774854109e-05, "loss": 4.0034, "step": 3722 }, { "epoch": 4.25, "grad_norm": 3.989978937260802, "learning_rate": 3.4283830169984395e-05, "loss": 4.1339, "step": 3723 }, { "epoch": 4.26, "grad_norm": 3.2465861828309355, "learning_rate": 3.4180788261939386e-05, "loss": 4.0685, "step": 3724 }, { "epoch": 4.26, "grad_norm": 4.35157579539799, "learning_rate": 3.4077892080895474e-05, "loss": 3.9266, "step": 3725 }, { "epoch": 4.26, "grad_norm": 4.0691757092400636, "learning_rate": 3.397514168326231e-05, "loss": 4.2285, "step": 3726 }, { "epoch": 4.26, "grad_norm": 4.8986787404401495, "learning_rate": 3.3872537125369556e-05, "loss": 4.0275, "step": 3727 }, { "epoch": 4.26, "grad_norm": 3.7319715601089967, "learning_rate": 3.37700784634669e-05, "loss": 4.0674, "step": 3728 }, { "epoch": 4.26, "grad_norm": 3.2988017560727783, "learning_rate": 3.3667765753724164e-05, "loss": 4.138, "step": 3729 }, { "epoch": 4.26, "grad_norm": 3.827153042688276, "learning_rate": 3.356559905223092e-05, "loss": 4.1378, "step": 3730 }, { 
"epoch": 4.26, "grad_norm": 3.939090366109162, "learning_rate": 3.346357841499686e-05, "loss": 4.2655, "step": 3731 }, { "epoch": 4.27, "grad_norm": 4.017774049601149, "learning_rate": 3.336170389795169e-05, "loss": 4.1322, "step": 3732 }, { "epoch": 4.27, "grad_norm": 3.1147577234720685, "learning_rate": 3.3259975556944785e-05, "loss": 3.9091, "step": 3733 }, { "epoch": 4.27, "grad_norm": 3.5275982207146868, "learning_rate": 3.315839344774558e-05, "loss": 4.0453, "step": 3734 }, { "epoch": 4.27, "grad_norm": 4.952638702099205, "learning_rate": 3.305695762604326e-05, "loss": 4.1446, "step": 3735 }, { "epoch": 4.27, "grad_norm": 4.884078842353125, "learning_rate": 3.2955668147446765e-05, "loss": 4.0492, "step": 3736 }, { "epoch": 4.27, "grad_norm": 4.091707525966017, "learning_rate": 3.2854525067484894e-05, "loss": 4.2128, "step": 3737 }, { "epoch": 4.27, "grad_norm": 2.515506749746765, "learning_rate": 3.275352844160621e-05, "loss": 4.0877, "step": 3738 }, { "epoch": 4.27, "grad_norm": 7.483745604250074, "learning_rate": 3.2652678325178774e-05, "loss": 4.0434, "step": 3739 }, { "epoch": 4.27, "grad_norm": 4.007635169698222, "learning_rate": 3.255197477349054e-05, "loss": 4.2162, "step": 3740 }, { "epoch": 4.28, "grad_norm": 4.798756583783412, "learning_rate": 3.245141784174913e-05, "loss": 4.1417, "step": 3741 }, { "epoch": 4.28, "grad_norm": 3.580542706622596, "learning_rate": 3.2351007585081735e-05, "loss": 4.0517, "step": 3742 }, { "epoch": 4.28, "grad_norm": 4.361008295125797, "learning_rate": 3.225074405853499e-05, "loss": 3.9871, "step": 3743 }, { "epoch": 4.28, "grad_norm": 3.1100676214393204, "learning_rate": 3.215062731707524e-05, "loss": 4.2718, "step": 3744 }, { "epoch": 4.28, "grad_norm": 5.751638130430259, "learning_rate": 3.20506574155884e-05, "loss": 3.9137, "step": 3745 }, { "epoch": 4.28, "grad_norm": 2.3515136844064055, "learning_rate": 3.19508344088798e-05, "loss": 4.0942, "step": 3746 }, { "epoch": 4.28, "grad_norm": 5.397947829425868, "learning_rate": 3.18511583516741e-05, "loss": 4.2324, "step": 3747 }, { "epoch": 4.28, "grad_norm": 3.8921474048531586, "learning_rate": 3.175162929861579e-05, "loss": 4.0793, "step": 3748 }, { "epoch": 4.28, "grad_norm": 4.447867557357014, "learning_rate": 3.165224730426839e-05, "loss": 4.0637, "step": 3749 }, { "epoch": 4.29, "grad_norm": 5.527495024213583, "learning_rate": 3.155301242311504e-05, "loss": 4.0633, "step": 3750 }, { "epoch": 4.29, "grad_norm": 3.1689117324916527, "learning_rate": 3.145392470955804e-05, "loss": 4.0836, "step": 3751 }, { "epoch": 4.29, "grad_norm": 4.497776433030314, "learning_rate": 3.1354984217919165e-05, "loss": 3.9597, "step": 3752 }, { "epoch": 4.29, "grad_norm": 4.116794843385568, "learning_rate": 3.1256191002439356e-05, "loss": 3.94, "step": 3753 }, { "epoch": 4.29, "grad_norm": 4.893382217233808, "learning_rate": 3.115754511727896e-05, "loss": 3.9929, "step": 3754 }, { "epoch": 4.29, "grad_norm": 4.537792445403418, "learning_rate": 3.105904661651741e-05, "loss": 4.2817, "step": 3755 }, { "epoch": 4.29, "grad_norm": 3.891264404204084, "learning_rate": 3.0960695554153426e-05, "loss": 4.1314, "step": 3756 }, { "epoch": 4.29, "grad_norm": 3.302319126658895, "learning_rate": 3.086249198410487e-05, "loss": 3.9794, "step": 3757 }, { "epoch": 4.29, "grad_norm": 6.0110122041397185, "learning_rate": 3.076443596020877e-05, "loss": 4.1296, "step": 3758 }, { "epoch": 4.3, "grad_norm": 5.411576387974142, "learning_rate": 3.06665275362213e-05, "loss": 4.1065, "step": 3759 }, { "epoch": 4.3, "grad_norm": 
2.6826169263789414, "learning_rate": 3.0568766765817556e-05, "loss": 4.1219, "step": 3760 }, { "epoch": 4.3, "grad_norm": 10.468453446906986, "learning_rate": 3.0471153702591833e-05, "loss": 3.9644, "step": 3761 }, { "epoch": 4.3, "grad_norm": 2.965988932966465, "learning_rate": 3.037368840005746e-05, "loss": 3.9549, "step": 3762 }, { "epoch": 4.3, "grad_norm": 4.041988346680554, "learning_rate": 3.0276370911646665e-05, "loss": 4.216, "step": 3763 }, { "epoch": 4.3, "grad_norm": 6.7040440028784865, "learning_rate": 3.0179201290710696e-05, "loss": 4.0198, "step": 3764 }, { "epoch": 4.3, "grad_norm": 2.806691721193337, "learning_rate": 3.0082179590519784e-05, "loss": 4.0215, "step": 3765 }, { "epoch": 4.3, "grad_norm": 2.831086579574943, "learning_rate": 2.9985305864262942e-05, "loss": 4.1985, "step": 3766 }, { "epoch": 4.31, "grad_norm": 3.8080458885178774, "learning_rate": 2.988858016504826e-05, "loss": 3.9085, "step": 3767 }, { "epoch": 4.31, "grad_norm": 4.363826542631269, "learning_rate": 2.9792002545902415e-05, "loss": 4.0654, "step": 3768 }, { "epoch": 4.31, "grad_norm": 3.2087842588090716, "learning_rate": 2.9695573059771127e-05, "loss": 4.1361, "step": 3769 }, { "epoch": 4.31, "grad_norm": 5.534174294269441, "learning_rate": 2.9599291759518738e-05, "loss": 4.0574, "step": 3770 }, { "epoch": 4.31, "grad_norm": 3.947671130679422, "learning_rate": 2.9503158697928532e-05, "loss": 4.1496, "step": 3771 }, { "epoch": 4.31, "grad_norm": 6.107559652326083, "learning_rate": 2.9407173927702344e-05, "loss": 4.1242, "step": 3772 }, { "epoch": 4.31, "grad_norm": 4.533618936030546, "learning_rate": 2.931133750146085e-05, "loss": 3.9442, "step": 3773 }, { "epoch": 4.31, "grad_norm": 7.433255515494958, "learning_rate": 2.9215649471743364e-05, "loss": 4.1676, "step": 3774 }, { "epoch": 4.31, "grad_norm": 3.3608388532613396, "learning_rate": 2.912010989100778e-05, "loss": 4.032, "step": 3775 }, { "epoch": 4.32, "grad_norm": 4.493878294886374, "learning_rate": 2.9024718811630744e-05, "loss": 4.1605, "step": 3776 }, { "epoch": 4.32, "grad_norm": 5.934285836371989, "learning_rate": 2.8929476285907317e-05, "loss": 4.1568, "step": 3777 }, { "epoch": 4.32, "grad_norm": 5.215548634543467, "learning_rate": 2.88343823660512e-05, "loss": 3.8765, "step": 3778 }, { "epoch": 4.32, "grad_norm": 13.316160606592792, "learning_rate": 2.8739437104194664e-05, "loss": 4.0842, "step": 3779 }, { "epoch": 4.32, "grad_norm": 3.254142631962817, "learning_rate": 2.8644640552388565e-05, "loss": 4.2111, "step": 3780 }, { "epoch": 4.32, "grad_norm": 3.489192298091436, "learning_rate": 2.854999276260197e-05, "loss": 4.2083, "step": 3781 }, { "epoch": 4.32, "grad_norm": 6.319543110610224, "learning_rate": 2.845549378672257e-05, "loss": 4.0847, "step": 3782 }, { "epoch": 4.32, "grad_norm": 6.885325918411502, "learning_rate": 2.836114367655653e-05, "loss": 4.3508, "step": 3783 }, { "epoch": 4.32, "grad_norm": 5.5876465695815, "learning_rate": 2.8266942483828303e-05, "loss": 4.1069, "step": 3784 }, { "epoch": 4.33, "grad_norm": 4.7869598042959955, "learning_rate": 2.817289026018068e-05, "loss": 4.0376, "step": 3785 }, { "epoch": 4.33, "grad_norm": 3.988014212724107, "learning_rate": 2.8078987057174783e-05, "loss": 3.9891, "step": 3786 }, { "epoch": 4.33, "grad_norm": 4.069915436113912, "learning_rate": 2.7985232926290134e-05, "loss": 4.265, "step": 3787 }, { "epoch": 4.33, "grad_norm": 3.690971499610017, "learning_rate": 2.7891627918924587e-05, "loss": 4.1365, "step": 3788 }, { "epoch": 4.33, "grad_norm": 7.3708266141108965, 
"learning_rate": 2.7798172086393957e-05, "loss": 3.9809, "step": 3789 }, { "epoch": 4.33, "grad_norm": 2.165809087849634, "learning_rate": 2.7704865479932558e-05, "loss": 4.1794, "step": 3790 }, { "epoch": 4.33, "grad_norm": 3.3979500301935404, "learning_rate": 2.7611708150692745e-05, "loss": 4.0439, "step": 3791 }, { "epoch": 4.33, "grad_norm": 4.24502569473556, "learning_rate": 2.7518700149745133e-05, "loss": 4.0118, "step": 3792 }, { "epoch": 4.33, "grad_norm": 6.597760823323774, "learning_rate": 2.7425841528078462e-05, "loss": 4.0947, "step": 3793 }, { "epoch": 4.34, "grad_norm": 2.730048463899977, "learning_rate": 2.733313233659935e-05, "loss": 4.013, "step": 3794 }, { "epoch": 4.34, "grad_norm": 5.367709487329057, "learning_rate": 2.7240572626132906e-05, "loss": 4.2193, "step": 3795 }, { "epoch": 4.34, "grad_norm": 4.331914344538612, "learning_rate": 2.7148162447422005e-05, "loss": 4.2947, "step": 3796 }, { "epoch": 4.34, "grad_norm": 3.5072764017284928, "learning_rate": 2.7055901851127625e-05, "loss": 4.1933, "step": 3797 }, { "epoch": 4.34, "grad_norm": 5.724973039135592, "learning_rate": 2.6963790887828673e-05, "loss": 4.1064, "step": 3798 }, { "epoch": 4.34, "grad_norm": 5.148735889942877, "learning_rate": 2.6871829608022088e-05, "loss": 3.942, "step": 3799 }, { "epoch": 4.34, "grad_norm": 4.0269166696466305, "learning_rate": 2.678001806212281e-05, "loss": 4.1367, "step": 3800 }, { "epoch": 4.34, "eval_blimp_filtered_avg": 0.5238805970149254, "eval_blimp_filtered_std": 0.00569646309909148, "step": 3800 }, { "epoch": 4.34, "eval_blimp_supplement_avg": 0.49137931034482757, "eval_blimp_supplement_std": 0.02270775243605513, "step": 3800 }, { "epoch": 4.34, "eval_vqa_filtered_avg": 0.37, "eval_vqa_filtered_std": 0.048523658709391, "step": 3800 }, { "epoch": 4.34, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 3800 }, { "epoch": 4.34, "grad_norm": 4.7550151190419125, "learning_rate": 2.6688356300463576e-05, "loss": 4.07, "step": 3801 }, { "epoch": 4.35, "grad_norm": 5.8967227015819, "learning_rate": 2.659684437329499e-05, "loss": 4.1135, "step": 3802 }, { "epoch": 4.35, "grad_norm": 20.765623988872903, "learning_rate": 2.6505482330785688e-05, "loss": 4.0943, "step": 3803 }, { "epoch": 4.35, "grad_norm": 5.008706615866194, "learning_rate": 2.6414270223022007e-05, "loss": 4.1475, "step": 3804 }, { "epoch": 4.35, "grad_norm": 3.8910604432650433, "learning_rate": 2.632320810000814e-05, "loss": 4.256, "step": 3805 }, { "epoch": 4.35, "grad_norm": 6.890204550357026, "learning_rate": 2.6232296011665933e-05, "loss": 4.2697, "step": 3806 }, { "epoch": 4.35, "grad_norm": 3.2895377547913154, "learning_rate": 2.614153400783511e-05, "loss": 4.0574, "step": 3807 }, { "epoch": 4.35, "grad_norm": 24.293456052943537, "learning_rate": 2.605092213827307e-05, "loss": 3.9068, "step": 3808 }, { "epoch": 4.35, "grad_norm": 6.945292892163397, "learning_rate": 2.596046045265494e-05, "loss": 4.2262, "step": 3809 }, { "epoch": 4.35, "grad_norm": 3.9698742558846565, "learning_rate": 2.587014900057349e-05, "loss": 4.0786, "step": 3810 }, { "epoch": 4.36, "grad_norm": 3.2519604057316114, "learning_rate": 2.5779987831539083e-05, "loss": 4.0985, "step": 3811 }, { "epoch": 4.36, "grad_norm": 4.938974606795237, "learning_rate": 2.5689976994979812e-05, "loss": 4.1996, "step": 3812 }, { "epoch": 4.36, "grad_norm": 4.4893931064334565, "learning_rate": 2.56001165402412e-05, "loss": 4.0054, "step": 3813 }, { "epoch": 4.36, "grad_norm": 3.354378933825482, "learning_rate": 
2.551040651658648e-05, "loss": 3.9906, "step": 3814 }, { "epoch": 4.36, "grad_norm": 4.123552870622024, "learning_rate": 2.5420846973196273e-05, "loss": 4.057, "step": 3815 }, { "epoch": 4.36, "grad_norm": 5.37460518893227, "learning_rate": 2.533143795916881e-05, "loss": 4.3089, "step": 3816 }, { "epoch": 4.36, "grad_norm": 4.293479156755431, "learning_rate": 2.5242179523519744e-05, "loss": 4.0919, "step": 3817 }, { "epoch": 4.36, "grad_norm": 3.8544130527148943, "learning_rate": 2.5153071715182205e-05, "loss": 4.1266, "step": 3818 }, { "epoch": 4.36, "grad_norm": 6.56426917808491, "learning_rate": 2.506411458300678e-05, "loss": 4.085, "step": 3819 }, { "epoch": 4.37, "grad_norm": 4.1210491802769, "learning_rate": 2.4975308175761356e-05, "loss": 4.0192, "step": 3820 }, { "epoch": 4.37, "grad_norm": 2.9384723610382792, "learning_rate": 2.488665254213129e-05, "loss": 4.3127, "step": 3821 }, { "epoch": 4.37, "grad_norm": 3.7590516793194664, "learning_rate": 2.479814773071924e-05, "loss": 4.0799, "step": 3822 }, { "epoch": 4.37, "grad_norm": 2.420579156358614, "learning_rate": 2.470979379004513e-05, "loss": 3.9447, "step": 3823 }, { "epoch": 4.37, "grad_norm": 8.877477216620878, "learning_rate": 2.4621590768546238e-05, "loss": 4.1021, "step": 3824 }, { "epoch": 4.37, "grad_norm": 4.164044314940858, "learning_rate": 2.4533538714577095e-05, "loss": 4.2391, "step": 3825 }, { "epoch": 4.37, "grad_norm": 6.4109933695554595, "learning_rate": 2.4445637676409412e-05, "loss": 4.1356, "step": 3826 }, { "epoch": 4.37, "grad_norm": 4.381753132626693, "learning_rate": 2.4357887702232347e-05, "loss": 4.2066, "step": 3827 }, { "epoch": 4.37, "grad_norm": 2.1834975332567375, "learning_rate": 2.4270288840151852e-05, "loss": 4.3794, "step": 3828 }, { "epoch": 4.38, "grad_norm": 3.2518974572145205, "learning_rate": 2.418284113819131e-05, "loss": 4.2172, "step": 3829 }, { "epoch": 4.38, "grad_norm": 3.5774363515226115, "learning_rate": 2.4095544644291177e-05, "loss": 4.2209, "step": 3830 }, { "epoch": 4.38, "grad_norm": 3.1999596120992013, "learning_rate": 2.4008399406309063e-05, "loss": 4.1799, "step": 3831 }, { "epoch": 4.38, "grad_norm": 3.124570791711505, "learning_rate": 2.3921405472019462e-05, "loss": 4.3332, "step": 3832 }, { "epoch": 4.38, "grad_norm": 3.1565157956334753, "learning_rate": 2.383456288911415e-05, "loss": 4.215, "step": 3833 }, { "epoch": 4.38, "grad_norm": 3.4734008546167416, "learning_rate": 2.3747871705201716e-05, "loss": 4.1438, "step": 3834 }, { "epoch": 4.38, "grad_norm": 7.566821586523008, "learning_rate": 2.3661331967808107e-05, "loss": 4.1506, "step": 3835 }, { "epoch": 4.38, "grad_norm": 3.269024291708266, "learning_rate": 2.357494372437577e-05, "loss": 4.3316, "step": 3836 }, { "epoch": 4.39, "grad_norm": 2.6711250873853905, "learning_rate": 2.3488707022264464e-05, "loss": 4.0621, "step": 3837 }, { "epoch": 4.39, "grad_norm": 4.40859373247557, "learning_rate": 2.3402621908750694e-05, "loss": 4.1106, "step": 3838 }, { "epoch": 4.39, "grad_norm": 4.170365817322545, "learning_rate": 2.3316688431027952e-05, "loss": 3.9865, "step": 3839 }, { "epoch": 4.39, "grad_norm": 6.189474034725737, "learning_rate": 2.3230906636206482e-05, "loss": 4.081, "step": 3840 }, { "epoch": 4.39, "grad_norm": 3.812676991876167, "learning_rate": 2.314527657131343e-05, "loss": 4.0213, "step": 3841 }, { "epoch": 4.39, "grad_norm": 5.425686111449401, "learning_rate": 2.3059798283292887e-05, "loss": 4.023, "step": 3842 }, { "epoch": 4.39, "grad_norm": 6.398138794209846, "learning_rate": 2.297447181900559e-05, 
"loss": 4.071, "step": 3843 }, { "epoch": 4.39, "grad_norm": 3.1383567278285667, "learning_rate": 2.288929722522913e-05, "loss": 4.1399, "step": 3844 }, { "epoch": 4.39, "grad_norm": 6.609205842692176, "learning_rate": 2.2804274548657687e-05, "loss": 3.9689, "step": 3845 }, { "epoch": 4.4, "grad_norm": 3.390935025418311, "learning_rate": 2.2719403835902307e-05, "loss": 4.0398, "step": 3846 }, { "epoch": 4.4, "grad_norm": 3.5678875269113712, "learning_rate": 2.2634685133490672e-05, "loss": 4.0065, "step": 3847 }, { "epoch": 4.4, "grad_norm": 4.000732662092567, "learning_rate": 2.25501184878672e-05, "loss": 4.1565, "step": 3848 }, { "epoch": 4.4, "grad_norm": 3.3728431842826514, "learning_rate": 2.2465703945392797e-05, "loss": 4.2647, "step": 3849 }, { "epoch": 4.4, "grad_norm": 4.700006963145578, "learning_rate": 2.2381441552345124e-05, "loss": 4.2847, "step": 3850 }, { "epoch": 4.4, "grad_norm": 3.7698096040659426, "learning_rate": 2.229733135491839e-05, "loss": 4.1292, "step": 3851 }, { "epoch": 4.4, "grad_norm": 3.815575865875677, "learning_rate": 2.2213373399223378e-05, "loss": 4.0985, "step": 3852 }, { "epoch": 4.4, "grad_norm": 4.642830124100103, "learning_rate": 2.2129567731287334e-05, "loss": 4.1428, "step": 3853 }, { "epoch": 4.4, "grad_norm": 3.211811374695382, "learning_rate": 2.2045914397054077e-05, "loss": 4.2421, "step": 3854 }, { "epoch": 4.41, "grad_norm": 3.3386146964600076, "learning_rate": 2.196241344238392e-05, "loss": 4.0981, "step": 3855 }, { "epoch": 4.41, "grad_norm": 5.274851722822476, "learning_rate": 2.1879064913053646e-05, "loss": 4.2274, "step": 3856 }, { "epoch": 4.41, "grad_norm": 3.404555699431231, "learning_rate": 2.179586885475647e-05, "loss": 4.0496, "step": 3857 }, { "epoch": 4.41, "grad_norm": 7.233665538698727, "learning_rate": 2.1712825313101946e-05, "loss": 4.0601, "step": 3858 }, { "epoch": 4.41, "grad_norm": 3.9364645522227626, "learning_rate": 2.162993433361615e-05, "loss": 4.0828, "step": 3859 }, { "epoch": 4.41, "grad_norm": 4.970550785926615, "learning_rate": 2.154719596174147e-05, "loss": 4.125, "step": 3860 }, { "epoch": 4.41, "grad_norm": 4.30931270122184, "learning_rate": 2.1464610242836487e-05, "loss": 4.3251, "step": 3861 }, { "epoch": 4.41, "grad_norm": 11.205974375714858, "learning_rate": 2.138217722217629e-05, "loss": 4.2038, "step": 3862 }, { "epoch": 4.41, "grad_norm": 3.812922715254097, "learning_rate": 2.1299896944952156e-05, "loss": 4.2154, "step": 3863 }, { "epoch": 4.42, "grad_norm": 4.20413541849482, "learning_rate": 2.1217769456271706e-05, "loss": 4.149, "step": 3864 }, { "epoch": 4.42, "grad_norm": 5.742868949595403, "learning_rate": 2.1135794801158722e-05, "loss": 4.0124, "step": 3865 }, { "epoch": 4.42, "grad_norm": 3.3863844613197833, "learning_rate": 2.1053973024553193e-05, "loss": 4.1052, "step": 3866 }, { "epoch": 4.42, "grad_norm": 4.9377932511227876, "learning_rate": 2.097230417131137e-05, "loss": 4.0703, "step": 3867 }, { "epoch": 4.42, "grad_norm": 4.121486202712329, "learning_rate": 2.0890788286205606e-05, "loss": 4.145, "step": 3868 }, { "epoch": 4.42, "grad_norm": 4.412624094734753, "learning_rate": 2.0809425413924452e-05, "loss": 4.0887, "step": 3869 }, { "epoch": 4.42, "grad_norm": 4.472232765225054, "learning_rate": 2.072821559907246e-05, "loss": 3.9357, "step": 3870 }, { "epoch": 4.42, "grad_norm": 4.50211320367663, "learning_rate": 2.064715888617039e-05, "loss": 4.1617, "step": 3871 }, { "epoch": 4.43, "grad_norm": 4.035795294862242, "learning_rate": 2.0566255319655056e-05, "loss": 4.1874, "step": 3872 }, { 
"epoch": 4.43, "grad_norm": 2.7987871680955223, "learning_rate": 2.048550494387925e-05, "loss": 4.0861, "step": 3873 }, { "epoch": 4.43, "grad_norm": 5.454777599491666, "learning_rate": 2.0404907803111827e-05, "loss": 4.0772, "step": 3874 }, { "epoch": 4.43, "grad_norm": 7.163448286190371, "learning_rate": 2.032446394153764e-05, "loss": 3.9419, "step": 3875 }, { "epoch": 4.43, "grad_norm": 3.669604924607669, "learning_rate": 2.024417340325748e-05, "loss": 4.1684, "step": 3876 }, { "epoch": 4.43, "grad_norm": 3.4064725605343105, "learning_rate": 2.0164036232288205e-05, "loss": 4.1339, "step": 3877 }, { "epoch": 4.43, "grad_norm": 4.257421647330552, "learning_rate": 2.0084052472562305e-05, "loss": 4.2213, "step": 3878 }, { "epoch": 4.43, "grad_norm": 2.9029162854211825, "learning_rate": 2.0004222167928474e-05, "loss": 3.9255, "step": 3879 }, { "epoch": 4.43, "grad_norm": 3.7620326421586947, "learning_rate": 1.9924545362151134e-05, "loss": 4.1038, "step": 3880 }, { "epoch": 4.44, "grad_norm": 5.058812665617539, "learning_rate": 1.9845022098910545e-05, "loss": 4.0792, "step": 3881 }, { "epoch": 4.44, "grad_norm": 3.644749911471363, "learning_rate": 1.9765652421802968e-05, "loss": 4.0537, "step": 3882 }, { "epoch": 4.44, "grad_norm": 3.4658443829354626, "learning_rate": 1.9686436374340197e-05, "loss": 4.0949, "step": 3883 }, { "epoch": 4.44, "grad_norm": 3.5054732597974407, "learning_rate": 1.9607373999949927e-05, "loss": 4.0198, "step": 3884 }, { "epoch": 4.44, "grad_norm": 4.177892876419979, "learning_rate": 1.9528465341975653e-05, "loss": 4.0142, "step": 3885 }, { "epoch": 4.44, "grad_norm": 3.2291717176165857, "learning_rate": 1.94497104436766e-05, "loss": 3.8081, "step": 3886 }, { "epoch": 4.44, "grad_norm": 3.8489401845469153, "learning_rate": 1.9371109348227577e-05, "loss": 4.048, "step": 3887 }, { "epoch": 4.44, "grad_norm": 3.9313916773883264, "learning_rate": 1.9292662098719137e-05, "loss": 4.1949, "step": 3888 }, { "epoch": 4.44, "grad_norm": 4.03253098206425, "learning_rate": 1.9214368738157615e-05, "loss": 3.9993, "step": 3889 }, { "epoch": 4.45, "grad_norm": 3.3824406785044854, "learning_rate": 1.9136229309464877e-05, "loss": 4.0129, "step": 3890 }, { "epoch": 4.45, "grad_norm": 3.853547916210705, "learning_rate": 1.9058243855478294e-05, "loss": 3.8151, "step": 3891 }, { "epoch": 4.45, "grad_norm": 3.758952968753591, "learning_rate": 1.8980412418950998e-05, "loss": 4.2905, "step": 3892 }, { "epoch": 4.45, "grad_norm": 4.11413985775139, "learning_rate": 1.8902735042551588e-05, "loss": 3.9722, "step": 3893 }, { "epoch": 4.45, "grad_norm": 1.7573348727455205, "learning_rate": 1.8825211768864333e-05, "loss": 4.0775, "step": 3894 }, { "epoch": 4.45, "grad_norm": 2.991314772878765, "learning_rate": 1.8747842640388745e-05, "loss": 4.0089, "step": 3895 }, { "epoch": 4.45, "grad_norm": 5.895143857480241, "learning_rate": 1.867062769954015e-05, "loss": 4.1536, "step": 3896 }, { "epoch": 4.45, "grad_norm": 3.891802227163627, "learning_rate": 1.8593566988649166e-05, "loss": 4.026, "step": 3897 }, { "epoch": 4.45, "grad_norm": 3.0807485937177264, "learning_rate": 1.8516660549961893e-05, "loss": 4.128, "step": 3898 }, { "epoch": 4.46, "grad_norm": 3.54402614713139, "learning_rate": 1.843990842563994e-05, "loss": 3.9487, "step": 3899 }, { "epoch": 4.46, "grad_norm": 1.8864535885239648, "learning_rate": 1.836331065776011e-05, "loss": 4.232, "step": 3900 }, { "epoch": 4.46, "eval_blimp_filtered_avg": 0.5240298507462686, "eval_blimp_filtered_std": 0.005720055690947627, "step": 3900 }, { "epoch": 
4.46, "eval_blimp_supplement_avg": 0.47844827586206895, "eval_blimp_supplement_std": 0.022917691086984965, "step": 3900 }, { "epoch": 4.46, "eval_vqa_filtered_avg": 0.37, "eval_vqa_filtered_std": 0.048523658709391, "step": 3900 }, { "epoch": 4.46, "eval_winoground_filtered_avg": 0.52, "eval_winoground_filtered_std": 0.05021167315686779, "step": 3900 }, { "epoch": 4.46, "grad_norm": 4.036710324362638, "learning_rate": 1.8286867288314754e-05, "loss": 4.2407, "step": 3901 }, { "epoch": 4.46, "grad_norm": 3.8138743009811846, "learning_rate": 1.821057835921158e-05, "loss": 3.9976, "step": 3902 }, { "epoch": 4.46, "grad_norm": 3.3309643983346153, "learning_rate": 1.8134443912273534e-05, "loss": 4.14, "step": 3903 }, { "epoch": 4.46, "grad_norm": 4.305424559034067, "learning_rate": 1.805846398923896e-05, "loss": 4.1639, "step": 3904 }, { "epoch": 4.46, "grad_norm": 3.012289768678228, "learning_rate": 1.798263863176147e-05, "loss": 4.1773, "step": 3905 }, { "epoch": 4.46, "grad_norm": 2.7629929835496863, "learning_rate": 1.7906967881409918e-05, "loss": 4.196, "step": 3906 }, { "epoch": 4.47, "grad_norm": 6.07052158876355, "learning_rate": 1.7831451779668417e-05, "loss": 3.9719, "step": 3907 }, { "epoch": 4.47, "grad_norm": 3.420958220761249, "learning_rate": 1.7756090367936215e-05, "loss": 4.3165, "step": 3908 }, { "epoch": 4.47, "grad_norm": 3.823735914002226, "learning_rate": 1.768088368752787e-05, "loss": 4.097, "step": 3909 }, { "epoch": 4.47, "grad_norm": 3.482087814711469, "learning_rate": 1.7605831779673098e-05, "loss": 4.2187, "step": 3910 }, { "epoch": 4.47, "grad_norm": 3.6068470399106167, "learning_rate": 1.7530934685516752e-05, "loss": 3.9933, "step": 3911 }, { "epoch": 4.47, "grad_norm": 3.899476326589671, "learning_rate": 1.7456192446118754e-05, "loss": 4.0122, "step": 3912 }, { "epoch": 4.47, "grad_norm": 3.677258195689179, "learning_rate": 1.73816051024542e-05, "loss": 4.0117, "step": 3913 }, { "epoch": 4.47, "grad_norm": 4.467881427083908, "learning_rate": 1.730717269541324e-05, "loss": 3.9551, "step": 3914 }, { "epoch": 4.47, "grad_norm": 3.8131732636552607, "learning_rate": 1.723289526580114e-05, "loss": 4.0231, "step": 3915 }, { "epoch": 4.48, "grad_norm": 3.273003399644787, "learning_rate": 1.7158772854338155e-05, "loss": 4.2193, "step": 3916 }, { "epoch": 4.48, "grad_norm": 2.7661766774048364, "learning_rate": 1.708480550165948e-05, "loss": 4.0163, "step": 3917 }, { "epoch": 4.48, "grad_norm": 3.811222476293808, "learning_rate": 1.701099324831545e-05, "loss": 4.1843, "step": 3918 }, { "epoch": 4.48, "grad_norm": 5.703627188338064, "learning_rate": 1.6937336134771263e-05, "loss": 4.2539, "step": 3919 }, { "epoch": 4.48, "grad_norm": 6.876104372623802, "learning_rate": 1.6863834201407144e-05, "loss": 3.9532, "step": 3920 }, { "epoch": 4.48, "grad_norm": 4.001749009849482, "learning_rate": 1.67904874885182e-05, "loss": 4.0959, "step": 3921 }, { "epoch": 4.48, "grad_norm": 4.656915046948665, "learning_rate": 1.6717296036314463e-05, "loss": 4.0539, "step": 3922 }, { "epoch": 4.48, "grad_norm": 2.795198613585797, "learning_rate": 1.664425988492084e-05, "loss": 3.9267, "step": 3923 }, { "epoch": 4.48, "grad_norm": 2.683432220071694, "learning_rate": 1.6571379074377134e-05, "loss": 4.1989, "step": 3924 }, { "epoch": 4.49, "grad_norm": 4.945996840061124, "learning_rate": 1.649865364463785e-05, "loss": 4.0086, "step": 3925 }, { "epoch": 4.49, "grad_norm": 3.926512945913584, "learning_rate": 1.642608363557246e-05, "loss": 4.1477, "step": 3926 }, { "epoch": 4.49, "grad_norm": 
4.725766723767193, "learning_rate": 1.6353669086965216e-05, "loss": 4.1185, "step": 3927 }, { "epoch": 4.49, "grad_norm": 3.931582848248367, "learning_rate": 1.6281410038515053e-05, "loss": 4.2814, "step": 3928 }, { "epoch": 4.49, "grad_norm": 4.394203660682757, "learning_rate": 1.6209306529835785e-05, "loss": 4.1393, "step": 3929 }, { "epoch": 4.49, "grad_norm": 3.5199227968388143, "learning_rate": 1.613735860045583e-05, "loss": 4.3944, "step": 3930 }, { "epoch": 4.49, "grad_norm": 3.1359824199952717, "learning_rate": 1.60655662898184e-05, "loss": 4.0104, "step": 3931 }, { "epoch": 4.49, "grad_norm": 3.49308502029687, "learning_rate": 1.5993929637281344e-05, "loss": 4.1539, "step": 3932 }, { "epoch": 4.49, "grad_norm": 7.451362666064646, "learning_rate": 1.5922448682117282e-05, "loss": 4.0051, "step": 3933 }, { "epoch": 4.5, "grad_norm": 6.211810619419057, "learning_rate": 1.5851123463513272e-05, "loss": 4.1456, "step": 3934 }, { "epoch": 4.5, "grad_norm": 4.614487481744874, "learning_rate": 1.577995402057113e-05, "loss": 4.0574, "step": 3935 }, { "epoch": 4.5, "grad_norm": 3.0930080769480437, "learning_rate": 1.570894039230731e-05, "loss": 4.0438, "step": 3936 }, { "epoch": 4.5, "grad_norm": 2.6053256819118706, "learning_rate": 1.5638082617652847e-05, "loss": 4.2941, "step": 3937 }, { "epoch": 4.5, "grad_norm": 5.958967936147854, "learning_rate": 1.5567380735453193e-05, "loss": 4.0967, "step": 3938 }, { "epoch": 4.5, "grad_norm": 4.512421496299159, "learning_rate": 1.5496834784468414e-05, "loss": 4.126, "step": 3939 }, { "epoch": 4.5, "grad_norm": 6.807071535368659, "learning_rate": 1.542644480337317e-05, "loss": 4.0106, "step": 3940 }, { "epoch": 4.5, "grad_norm": 3.0776721785249777, "learning_rate": 1.5356210830756533e-05, "loss": 4.0628, "step": 3941 }, { "epoch": 4.51, "grad_norm": 3.9227529645873216, "learning_rate": 1.5286132905121962e-05, "loss": 4.1682, "step": 3942 }, { "epoch": 4.51, "grad_norm": 4.6294698762562705, "learning_rate": 1.5216211064887628e-05, "loss": 4.032, "step": 3943 }, { "epoch": 4.51, "grad_norm": 5.290366610125381, "learning_rate": 1.5146445348385872e-05, "loss": 4.1652, "step": 3944 }, { "epoch": 4.51, "grad_norm": 2.374226090818631, "learning_rate": 1.5076835793863629e-05, "loss": 4.2616, "step": 3945 }, { "epoch": 4.51, "grad_norm": 6.486212150480482, "learning_rate": 1.5007382439482074e-05, "loss": 3.8761, "step": 3946 }, { "epoch": 4.51, "grad_norm": 6.1765575786850935, "learning_rate": 1.4938085323316817e-05, "loss": 4.1172, "step": 3947 }, { "epoch": 4.51, "grad_norm": 3.488875532024985, "learning_rate": 1.4868944483357836e-05, "loss": 4.0738, "step": 3948 }, { "epoch": 4.51, "grad_norm": 5.962398051882274, "learning_rate": 1.479995995750941e-05, "loss": 3.9247, "step": 3949 }, { "epoch": 4.51, "grad_norm": 4.513844327520555, "learning_rate": 1.4731131783590156e-05, "loss": 4.201, "step": 3950 }, { "epoch": 4.52, "grad_norm": 3.936683447187226, "learning_rate": 1.466245999933292e-05, "loss": 4.0537, "step": 3951 }, { "epoch": 4.52, "grad_norm": 3.680512408427049, "learning_rate": 1.4593944642384859e-05, "loss": 4.1327, "step": 3952 }, { "epoch": 4.52, "grad_norm": 3.9738319548586585, "learning_rate": 1.4525585750307356e-05, "loss": 4.0406, "step": 3953 }, { "epoch": 4.52, "grad_norm": 5.738394397694501, "learning_rate": 1.445738336057607e-05, "loss": 4.1436, "step": 3954 }, { "epoch": 4.52, "grad_norm": 4.224894510638059, "learning_rate": 1.4389337510580689e-05, "loss": 4.0665, "step": 3955 }, { "epoch": 4.52, "grad_norm": 3.620272063865558, 
"learning_rate": 1.4321448237625277e-05, "loss": 4.1797, "step": 3956 }, { "epoch": 4.52, "grad_norm": 2.7039392136664415, "learning_rate": 1.4253715578927993e-05, "loss": 4.1106, "step": 3957 }, { "epoch": 4.52, "grad_norm": 2.851070299104164, "learning_rate": 1.4186139571621102e-05, "loss": 3.9647, "step": 3958 }, { "epoch": 4.52, "grad_norm": 4.378771558868778, "learning_rate": 1.4118720252751037e-05, "loss": 4.2093, "step": 3959 }, { "epoch": 4.53, "grad_norm": 2.6907952261477015, "learning_rate": 1.4051457659278332e-05, "loss": 4.1396, "step": 3960 }, { "epoch": 4.53, "grad_norm": 4.774372302687286, "learning_rate": 1.3984351828077557e-05, "loss": 4.0438, "step": 3961 }, { "epoch": 4.53, "grad_norm": 4.552835756715325, "learning_rate": 1.3917402795937383e-05, "loss": 4.0981, "step": 3962 }, { "epoch": 4.53, "grad_norm": 2.539530089940984, "learning_rate": 1.3850610599560486e-05, "loss": 4.2572, "step": 3963 }, { "epoch": 4.53, "grad_norm": 3.3820212224055775, "learning_rate": 1.3783975275563608e-05, "loss": 4.0154, "step": 3964 }, { "epoch": 4.53, "grad_norm": 2.8523872159918247, "learning_rate": 1.3717496860477428e-05, "loss": 3.8822, "step": 3965 }, { "epoch": 4.53, "grad_norm": 20.36807453832221, "learning_rate": 1.3651175390746694e-05, "loss": 4.1767, "step": 3966 }, { "epoch": 4.53, "grad_norm": 3.0787810487871905, "learning_rate": 1.3585010902729987e-05, "loss": 4.0287, "step": 3967 }, { "epoch": 4.53, "grad_norm": 4.44348091336858, "learning_rate": 1.351900343269996e-05, "loss": 4.1939, "step": 3968 }, { "epoch": 4.54, "grad_norm": 4.778921311712089, "learning_rate": 1.3453153016843132e-05, "loss": 4.2358, "step": 3969 }, { "epoch": 4.54, "grad_norm": 4.7137092158355545, "learning_rate": 1.3387459691259861e-05, "loss": 4.1348, "step": 3970 }, { "epoch": 4.54, "grad_norm": 3.291489950315583, "learning_rate": 1.3321923491964537e-05, "loss": 4.0196, "step": 3971 }, { "epoch": 4.54, "grad_norm": 6.781358490648424, "learning_rate": 1.325654445488522e-05, "loss": 4.0413, "step": 3972 }, { "epoch": 4.54, "grad_norm": 7.388259637001667, "learning_rate": 1.3191322615863909e-05, "loss": 4.2396, "step": 3973 }, { "epoch": 4.54, "grad_norm": 3.548430117398899, "learning_rate": 1.3126258010656466e-05, "loss": 4.0966, "step": 3974 }, { "epoch": 4.54, "grad_norm": 3.7574643280789672, "learning_rate": 1.3061350674932525e-05, "loss": 3.886, "step": 3975 }, { "epoch": 4.54, "grad_norm": 5.26509357266034, "learning_rate": 1.2996600644275423e-05, "loss": 3.9666, "step": 3976 }, { "epoch": 4.55, "grad_norm": 3.1844830808337496, "learning_rate": 1.2932007954182366e-05, "loss": 4.2156, "step": 3977 }, { "epoch": 4.55, "grad_norm": 4.49461094628924, "learning_rate": 1.286757264006426e-05, "loss": 4.2779, "step": 3978 }, { "epoch": 4.55, "grad_norm": 3.88705236971674, "learning_rate": 1.2803294737245751e-05, "loss": 4.089, "step": 3979 }, { "epoch": 4.55, "grad_norm": 2.7741634579103582, "learning_rate": 1.2739174280965115e-05, "loss": 4.1153, "step": 3980 }, { "epoch": 4.55, "grad_norm": 4.393215941798447, "learning_rate": 1.2675211306374367e-05, "loss": 4.1399, "step": 3981 }, { "epoch": 4.55, "grad_norm": 3.415989022053479, "learning_rate": 1.2611405848539224e-05, "loss": 4.0104, "step": 3982 }, { "epoch": 4.55, "grad_norm": 2.5251455459752945, "learning_rate": 1.2547757942439074e-05, "loss": 3.967, "step": 3983 }, { "epoch": 4.55, "grad_norm": 4.329278282412647, "learning_rate": 1.2484267622966771e-05, "loss": 4.0165, "step": 3984 }, { "epoch": 4.55, "grad_norm": 2.810106587340108, "learning_rate": 
1.2420934924928938e-05, "loss": 4.1595, "step": 3985 }, { "epoch": 4.56, "grad_norm": 4.4992396329999815, "learning_rate": 1.2357759883045737e-05, "loss": 4.1164, "step": 3986 }, { "epoch": 4.56, "grad_norm": 4.331106530135752, "learning_rate": 1.2294742531950863e-05, "loss": 4.1665, "step": 3987 }, { "epoch": 4.56, "grad_norm": 6.651653581189197, "learning_rate": 1.2231882906191615e-05, "loss": 4.1875, "step": 3988 }, { "epoch": 4.56, "grad_norm": 2.74141841354632, "learning_rate": 1.216918104022876e-05, "loss": 3.9784, "step": 3989 }, { "epoch": 4.56, "grad_norm": 2.806923778733947, "learning_rate": 1.2106636968436633e-05, "loss": 3.8698, "step": 3990 }, { "epoch": 4.56, "grad_norm": 2.706510973596713, "learning_rate": 1.2044250725103111e-05, "loss": 3.9779, "step": 3991 }, { "epoch": 4.56, "grad_norm": 5.949757706081025, "learning_rate": 1.198202234442943e-05, "loss": 4.0682, "step": 3992 }, { "epoch": 4.56, "grad_norm": 5.9484985151257455, "learning_rate": 1.1919951860530341e-05, "loss": 3.9659, "step": 3993 }, { "epoch": 4.56, "grad_norm": 3.77239696220753, "learning_rate": 1.1858039307434019e-05, "loss": 4.022, "step": 3994 }, { "epoch": 4.57, "grad_norm": 3.5662279255718463, "learning_rate": 1.1796284719082084e-05, "loss": 4.2784, "step": 3995 }, { "epoch": 4.57, "grad_norm": 3.6062102579474313, "learning_rate": 1.1734688129329583e-05, "loss": 3.9701, "step": 3996 }, { "epoch": 4.57, "grad_norm": 4.376554116708603, "learning_rate": 1.167324957194481e-05, "loss": 3.9548, "step": 3997 }, { "epoch": 4.57, "grad_norm": 4.617633363802044, "learning_rate": 1.1611969080609585e-05, "loss": 4.1449, "step": 3998 }, { "epoch": 4.57, "grad_norm": 1.878363997098849, "learning_rate": 1.1550846688919001e-05, "loss": 4.1431, "step": 3999 }, { "epoch": 4.57, "grad_norm": 4.915068865934237, "learning_rate": 1.1489882430381492e-05, "loss": 4.161, "step": 4000 }, { "epoch": 4.57, "eval_blimp_filtered_avg": 0.5243283582089552, "eval_blimp_filtered_std": 0.0056953767134424104, "step": 4000 }, { "epoch": 4.57, "eval_blimp_supplement_avg": 0.47198275862068967, "eval_blimp_supplement_std": 0.022638377289993485, "step": 4000 }, { "epoch": 4.57, "eval_vqa_filtered_avg": 0.37, "eval_vqa_filtered_std": 0.048523658709391, "step": 4000 }, { "epoch": 4.57, "eval_winoground_filtered_avg": 0.49, "eval_winoground_filtered_std": 0.05024183937956912, "step": 4000 }, { "epoch": 4.57, "grad_norm": 4.083826968389988, "learning_rate": 1.1429076338418719e-05, "loss": 3.8051, "step": 4001 }, { "epoch": 4.57, "grad_norm": 3.7118379079289876, "learning_rate": 1.1368428446365785e-05, "loss": 4.244, "step": 4002 }, { "epoch": 4.57, "grad_norm": 2.9136873029362773, "learning_rate": 1.1307938787470927e-05, "loss": 4.1234, "step": 4003 }, { "epoch": 4.58, "grad_norm": 2.562723323907262, "learning_rate": 1.1247607394895752e-05, "loss": 3.9621, "step": 4004 }, { "epoch": 4.58, "grad_norm": 6.998071279862643, "learning_rate": 1.1187434301715003e-05, "loss": 4.2605, "step": 4005 }, { "epoch": 4.58, "grad_norm": 3.916202270413966, "learning_rate": 1.1127419540916693e-05, "loss": 4.3383, "step": 4006 }, { "epoch": 4.58, "grad_norm": 3.412082759718138, "learning_rate": 1.1067563145402004e-05, "loss": 3.9657, "step": 4007 }, { "epoch": 4.58, "grad_norm": 27.996960225640553, "learning_rate": 1.1007865147985317e-05, "loss": 3.9547, "step": 4008 }, { "epoch": 4.58, "grad_norm": 5.356330623285319, "learning_rate": 1.0948325581394257e-05, "loss": 4.1502, "step": 4009 }, { "epoch": 4.58, "grad_norm": 3.056214928207012, "learning_rate": 
1.0888944478269412e-05, "loss": 4.2495, "step": 4010 }, { "epoch": 4.58, "grad_norm": 2.5108412335351638, "learning_rate": 1.0829721871164643e-05, "loss": 4.1524, "step": 4011 }, { "epoch": 4.59, "grad_norm": 2.807973791115616, "learning_rate": 1.0770657792546844e-05, "loss": 4.1149, "step": 4012 }, { "epoch": 4.59, "grad_norm": 2.263668308563047, "learning_rate": 1.0711752274796082e-05, "loss": 4.0738, "step": 4013 }, { "epoch": 4.59, "grad_norm": 4.333137465929719, "learning_rate": 1.0653005350205423e-05, "loss": 4.1973, "step": 4014 }, { "epoch": 4.59, "grad_norm": 5.613112714212941, "learning_rate": 1.0594417050981009e-05, "loss": 4.1275, "step": 4015 }, { "epoch": 4.59, "grad_norm": 5.0690285261432075, "learning_rate": 1.0535987409242042e-05, "loss": 4.1298, "step": 4016 }, { "epoch": 4.59, "grad_norm": 3.123488549908844, "learning_rate": 1.0477716457020801e-05, "loss": 4.0367, "step": 4017 }, { "epoch": 4.59, "grad_norm": 3.019879265796207, "learning_rate": 1.0419604226262335e-05, "loss": 4.1774, "step": 4018 }, { "epoch": 4.59, "grad_norm": 3.6594758114212547, "learning_rate": 1.036165074882499e-05, "loss": 4.1817, "step": 4019 }, { "epoch": 4.59, "grad_norm": 2.3794368414126112, "learning_rate": 1.0303856056479821e-05, "loss": 4.135, "step": 4020 }, { "epoch": 4.6, "grad_norm": 5.127374048209246, "learning_rate": 1.0246220180911013e-05, "loss": 4.1546, "step": 4021 }, { "epoch": 4.6, "grad_norm": 3.1718755881347525, "learning_rate": 1.0188743153715628e-05, "loss": 4.0469, "step": 4022 }, { "epoch": 4.6, "grad_norm": 6.618431231927645, "learning_rate": 1.0131425006403626e-05, "loss": 3.8927, "step": 4023 }, { "epoch": 4.6, "grad_norm": 4.382420453857184, "learning_rate": 1.0074265770397838e-05, "loss": 4.2084, "step": 4024 }, { "epoch": 4.6, "grad_norm": 4.592646055659577, "learning_rate": 1.0017265477033998e-05, "loss": 3.9096, "step": 4025 }, { "epoch": 4.6, "grad_norm": 2.711023067891441, "learning_rate": 9.960424157560842e-06, "loss": 4.0323, "step": 4026 }, { "epoch": 4.6, "grad_norm": 2.041185437705355, "learning_rate": 9.903741843139679e-06, "loss": 4.1031, "step": 4027 }, { "epoch": 4.6, "grad_norm": 3.450932742159155, "learning_rate": 9.847218564844884e-06, "loss": 4.0284, "step": 4028 }, { "epoch": 4.6, "grad_norm": 5.255149283288173, "learning_rate": 9.790854353663502e-06, "loss": 4.3427, "step": 4029 }, { "epoch": 4.61, "grad_norm": 3.3120025119127794, "learning_rate": 9.734649240495517e-06, "loss": 3.9292, "step": 4030 }, { "epoch": 4.61, "grad_norm": 2.9272064803868387, "learning_rate": 9.678603256153584e-06, "loss": 4.0939, "step": 4031 }, { "epoch": 4.61, "grad_norm": 2.900200339971581, "learning_rate": 9.622716431363086e-06, "loss": 4.0414, "step": 4032 }, { "epoch": 4.61, "grad_norm": 2.5699663865176037, "learning_rate": 9.566988796762288e-06, "loss": 4.1209, "step": 4033 }, { "epoch": 4.61, "grad_norm": 2.81355824986362, "learning_rate": 9.511420382902113e-06, "loss": 4.2825, "step": 4034 }, { "epoch": 4.61, "grad_norm": 3.1218294492556358, "learning_rate": 9.45601122024613e-06, "loss": 3.9088, "step": 4035 }, { "epoch": 4.61, "grad_norm": 3.816996579772586, "learning_rate": 9.400761339170637e-06, "loss": 3.9956, "step": 4036 }, { "epoch": 4.61, "grad_norm": 3.636511063204475, "learning_rate": 9.345670769964742e-06, "loss": 4.0131, "step": 4037 }, { "epoch": 4.61, "grad_norm": 5.7478907603635685, "learning_rate": 9.29073954283005e-06, "loss": 3.9302, "step": 4038 }, { "epoch": 4.62, "grad_norm": 5.3301843211156905, "learning_rate": 9.235967687880941e-06, "loss": 
4.0518, "step": 4039 }, { "epoch": 4.62, "grad_norm": 2.1229866602177982, "learning_rate": 9.181355235144261e-06, "loss": 4.1877, "step": 4040 }, { "epoch": 4.62, "grad_norm": 3.4213828393386927, "learning_rate": 9.126902214559628e-06, "loss": 3.9532, "step": 4041 }, { "epoch": 4.62, "grad_norm": 4.325072401805332, "learning_rate": 9.072608655979164e-06, "loss": 4.2427, "step": 4042 }, { "epoch": 4.62, "grad_norm": 3.6586162614661917, "learning_rate": 9.01847458916759e-06, "loss": 4.0063, "step": 4043 }, { "epoch": 4.62, "grad_norm": 5.446208301834507, "learning_rate": 8.96450004380227e-06, "loss": 4.2332, "step": 4044 }, { "epoch": 4.62, "grad_norm": 5.394530438747241, "learning_rate": 8.910685049472965e-06, "loss": 3.9655, "step": 4045 }, { "epoch": 4.62, "grad_norm": 2.751997561818143, "learning_rate": 8.85702963568211e-06, "loss": 4.0227, "step": 4046 }, { "epoch": 4.63, "grad_norm": 2.4020260851078032, "learning_rate": 8.803533831844577e-06, "loss": 4.2375, "step": 4047 }, { "epoch": 4.63, "grad_norm": 3.445989928682379, "learning_rate": 8.750197667287706e-06, "loss": 4.1277, "step": 4048 }, { "epoch": 4.63, "grad_norm": 3.0769752399109507, "learning_rate": 8.69702117125144e-06, "loss": 4.0396, "step": 4049 }, { "epoch": 4.63, "grad_norm": 4.893744754028155, "learning_rate": 8.644004372888059e-06, "loss": 4.1491, "step": 4050 }, { "epoch": 4.63, "grad_norm": 3.148103616379697, "learning_rate": 8.59114730126238e-06, "loss": 4.2078, "step": 4051 }, { "epoch": 4.63, "grad_norm": 2.6100897352198538, "learning_rate": 8.538449985351625e-06, "loss": 4.0135, "step": 4052 }, { "epoch": 4.63, "grad_norm": 4.324480730484191, "learning_rate": 8.485912454045419e-06, "loss": 3.8184, "step": 4053 }, { "epoch": 4.63, "grad_norm": 2.8950820782747355, "learning_rate": 8.433534736145819e-06, "loss": 4.0826, "step": 4054 }, { "epoch": 4.63, "grad_norm": 3.5512675793392017, "learning_rate": 8.381316860367293e-06, "loss": 4.3048, "step": 4055 }, { "epoch": 4.64, "grad_norm": 2.3506913970544767, "learning_rate": 8.329258855336574e-06, "loss": 4.1, "step": 4056 }, { "epoch": 4.64, "grad_norm": 4.752256431195318, "learning_rate": 8.277360749592866e-06, "loss": 4.2809, "step": 4057 }, { "epoch": 4.64, "grad_norm": 3.815785242295265, "learning_rate": 8.225622571587643e-06, "loss": 3.9356, "step": 4058 }, { "epoch": 4.64, "grad_norm": 4.990748870661661, "learning_rate": 8.174044349684783e-06, "loss": 4.013, "step": 4059 }, { "epoch": 4.64, "grad_norm": 39.63887652495673, "learning_rate": 8.122626112160336e-06, "loss": 4.0079, "step": 4060 }, { "epoch": 4.64, "grad_norm": 2.6873759274096294, "learning_rate": 8.071367887202817e-06, "loss": 3.9895, "step": 4061 }, { "epoch": 4.64, "grad_norm": 6.538519490937643, "learning_rate": 8.020269702912885e-06, "loss": 4.1062, "step": 4062 }, { "epoch": 4.64, "grad_norm": 4.871153169002898, "learning_rate": 7.969331587303496e-06, "loss": 4.0802, "step": 4063 }, { "epoch": 4.64, "grad_norm": 2.650666804190377, "learning_rate": 7.918553568299912e-06, "loss": 4.1916, "step": 4064 }, { "epoch": 4.65, "grad_norm": 3.952448132545441, "learning_rate": 7.867935673739534e-06, "loss": 4.1941, "step": 4065 }, { "epoch": 4.65, "grad_norm": 3.543736276089882, "learning_rate": 7.817477931372063e-06, "loss": 4.0595, "step": 4066 }, { "epoch": 4.65, "grad_norm": 5.520349085421009, "learning_rate": 7.767180368859305e-06, "loss": 3.9056, "step": 4067 }, { "epoch": 4.65, "grad_norm": 4.191234944668208, "learning_rate": 7.717043013775404e-06, "loss": 4.1407, "step": 4068 }, { "epoch": 4.65, 
"grad_norm": 3.5499970724822005, "learning_rate": 7.667065893606506e-06, "loss": 3.8692, "step": 4069 }, { "epoch": 4.65, "grad_norm": 2.342515503237021, "learning_rate": 7.6172490357510275e-06, "loss": 4.1101, "step": 4070 }, { "epoch": 4.65, "grad_norm": 3.292414967483802, "learning_rate": 7.56759246751949e-06, "loss": 4.0174, "step": 4071 }, { "epoch": 4.65, "grad_norm": 3.6403307810700034, "learning_rate": 7.51809621613455e-06, "loss": 3.9366, "step": 4072 }, { "epoch": 4.65, "grad_norm": 3.391342845461786, "learning_rate": 7.468760308730937e-06, "loss": 4.2533, "step": 4073 }, { "epoch": 4.66, "grad_norm": 3.2566946755404818, "learning_rate": 7.419584772355514e-06, "loss": 3.9729, "step": 4074 }, { "epoch": 4.66, "grad_norm": 2.6961725848341884, "learning_rate": 7.370569633967216e-06, "loss": 4.0817, "step": 4075 }, { "epoch": 4.66, "grad_norm": 5.928782245637853, "learning_rate": 7.321714920437049e-06, "loss": 3.987, "step": 4076 }, { "epoch": 4.66, "grad_norm": 2.1358984546770157, "learning_rate": 7.273020658548123e-06, "loss": 4.1768, "step": 4077 }, { "epoch": 4.66, "grad_norm": 4.3711733486853035, "learning_rate": 7.22448687499545e-06, "loss": 4.0668, "step": 4078 }, { "epoch": 4.66, "grad_norm": 3.351720278093051, "learning_rate": 7.176113596386213e-06, "loss": 4.1144, "step": 4079 }, { "epoch": 4.66, "grad_norm": 2.927710686504573, "learning_rate": 7.1279008492394655e-06, "loss": 4.0912, "step": 4080 }, { "epoch": 4.66, "grad_norm": 3.995003234978026, "learning_rate": 7.079848659986432e-06, "loss": 3.979, "step": 4081 }, { "epoch": 4.67, "grad_norm": 4.808852220992887, "learning_rate": 7.031957054970072e-06, "loss": 4.0423, "step": 4082 }, { "epoch": 4.67, "grad_norm": 4.57601012863105, "learning_rate": 6.984226060445519e-06, "loss": 3.9875, "step": 4083 }, { "epoch": 4.67, "grad_norm": 3.057954630628553, "learning_rate": 6.936655702579807e-06, "loss": 4.1043, "step": 4084 }, { "epoch": 4.67, "grad_norm": 4.833889604894008, "learning_rate": 6.88924600745191e-06, "loss": 4.2445, "step": 4085 }, { "epoch": 4.67, "grad_norm": 4.050909005029455, "learning_rate": 6.841997001052602e-06, "loss": 3.9401, "step": 4086 }, { "epoch": 4.67, "grad_norm": 3.273187578006022, "learning_rate": 6.79490870928473e-06, "loss": 4.1666, "step": 4087 }, { "epoch": 4.67, "grad_norm": 2.2644079227607077, "learning_rate": 6.747981157962912e-06, "loss": 4.0261, "step": 4088 }, { "epoch": 4.67, "grad_norm": 3.610146476168009, "learning_rate": 6.701214372813802e-06, "loss": 3.9117, "step": 4089 }, { "epoch": 4.67, "grad_norm": 2.371341100941479, "learning_rate": 6.654608379475656e-06, "loss": 4.0189, "step": 4090 }, { "epoch": 4.68, "grad_norm": 2.7828718714905083, "learning_rate": 6.608163203498906e-06, "loss": 3.9445, "step": 4091 }, { "epoch": 4.68, "grad_norm": 3.3083006592788493, "learning_rate": 6.561878870345549e-06, "loss": 4.3049, "step": 4092 }, { "epoch": 4.68, "grad_norm": 1.9106554604435018, "learning_rate": 6.515755405389522e-06, "loss": 4.0196, "step": 4093 }, { "epoch": 4.68, "grad_norm": 3.35902876998861, "learning_rate": 6.469792833916632e-06, "loss": 4.1701, "step": 4094 }, { "epoch": 4.68, "grad_norm": 4.164324827294454, "learning_rate": 6.423991181124355e-06, "loss": 4.1308, "step": 4095 }, { "epoch": 4.68, "grad_norm": 3.2068679085442238, "learning_rate": 6.378350472121973e-06, "loss": 4.321, "step": 4096 }, { "epoch": 4.68, "grad_norm": 4.10691182137336, "learning_rate": 6.332870731930606e-06, "loss": 4.1513, "step": 4097 }, { "epoch": 4.68, "grad_norm": 5.137208000432623, 
"learning_rate": 6.287551985483075e-06, "loss": 4.0206, "step": 4098 }, { "epoch": 4.68, "grad_norm": 3.960813120268537, "learning_rate": 6.242394257623973e-06, "loss": 3.9969, "step": 4099 }, { "epoch": 4.69, "grad_norm": 5.24228697276774, "learning_rate": 6.1973975731095634e-06, "loss": 4.2676, "step": 4100 }, { "epoch": 4.69, "eval_blimp_filtered_avg": 0.5204477611940298, "eval_blimp_filtered_std": 0.005696443348474412, "step": 4100 }, { "epoch": 4.69, "eval_blimp_supplement_avg": 0.47629310344827586, "eval_blimp_supplement_std": 0.022712727381912646, "step": 4100 }, { "epoch": 4.69, "eval_vqa_filtered_avg": 0.36, "eval_vqa_filtered_std": 0.04824181513244218, "step": 4100 }, { "epoch": 4.69, "eval_winoground_filtered_avg": 0.5, "eval_winoground_filtered_std": 0.050251890762960605, "step": 4100 }, { "epoch": 4.69, "grad_norm": 2.6149711046487107, "learning_rate": 6.152561956607915e-06, "loss": 4.1195, "step": 4101 }, { "epoch": 4.69, "grad_norm": 2.277802517595267, "learning_rate": 6.107887432698699e-06, "loss": 4.1484, "step": 4102 }, { "epoch": 4.69, "grad_norm": 4.514651931816453, "learning_rate": 6.063374025873324e-06, "loss": 4.313, "step": 4103 }, { "epoch": 4.69, "grad_norm": 2.9516369775750326, "learning_rate": 6.0190217605348704e-06, "loss": 4.2599, "step": 4104 }, { "epoch": 4.69, "grad_norm": 2.81229472610806, "learning_rate": 5.97483066099802e-06, "loss": 3.9711, "step": 4105 }, { "epoch": 4.69, "grad_norm": 3.212383496486994, "learning_rate": 5.930800751489228e-06, "loss": 4.1372, "step": 4106 }, { "epoch": 4.69, "grad_norm": 5.8711344141211805, "learning_rate": 5.886932056146454e-06, "loss": 3.8939, "step": 4107 }, { "epoch": 4.69, "grad_norm": 2.817584409686935, "learning_rate": 5.843224599019325e-06, "loss": 4.171, "step": 4108 }, { "epoch": 4.7, "grad_norm": 3.627385570816045, "learning_rate": 5.799678404069108e-06, "loss": 4.0284, "step": 4109 }, { "epoch": 4.7, "grad_norm": 5.903950099636898, "learning_rate": 5.756293495168607e-06, "loss": 4.1023, "step": 4110 }, { "epoch": 4.7, "grad_norm": 4.004538078157361, "learning_rate": 5.713069896102263e-06, "loss": 4.166, "step": 4111 }, { "epoch": 4.7, "grad_norm": 3.7016423947538026, "learning_rate": 5.67000763056602e-06, "loss": 4.0772, "step": 4112 }, { "epoch": 4.7, "grad_norm": 6.482951501421826, "learning_rate": 5.627106722167396e-06, "loss": 3.9197, "step": 4113 }, { "epoch": 4.7, "grad_norm": 2.2694869640960103, "learning_rate": 5.584367194425476e-06, "loss": 4.1161, "step": 4114 }, { "epoch": 4.7, "grad_norm": 2.831325612127297, "learning_rate": 5.541789070770852e-06, "loss": 4.0728, "step": 4115 }, { "epoch": 4.7, "grad_norm": 2.5351871512577775, "learning_rate": 5.499372374545652e-06, "loss": 4.0256, "step": 4116 }, { "epoch": 4.71, "grad_norm": 2.4748892820694204, "learning_rate": 5.457117129003474e-06, "loss": 4.1725, "step": 4117 }, { "epoch": 4.71, "grad_norm": 3.236820638804631, "learning_rate": 5.415023357309456e-06, "loss": 4.1681, "step": 4118 }, { "epoch": 4.71, "grad_norm": 4.835061284108866, "learning_rate": 5.373091082540171e-06, "loss": 4.2828, "step": 4119 }, { "epoch": 4.71, "grad_norm": 2.9945199114469996, "learning_rate": 5.331320327683664e-06, "loss": 4.1166, "step": 4120 }, { "epoch": 4.71, "grad_norm": 2.368580364427367, "learning_rate": 5.289711115639384e-06, "loss": 3.9402, "step": 4121 }, { "epoch": 4.71, "grad_norm": 2.988604763486571, "learning_rate": 5.2482634692183504e-06, "loss": 4.1609, "step": 4122 }, { "epoch": 4.71, "grad_norm": 3.1501703987649634, "learning_rate": 
5.206977411142854e-06, "loss": 4.0472, "step": 4123 }, { "epoch": 4.71, "grad_norm": 2.720187649860745, "learning_rate": 5.165852964046724e-06, "loss": 4.0094, "step": 4124 }, { "epoch": 4.71, "grad_norm": 3.275650154581212, "learning_rate": 5.124890150475158e-06, "loss": 4.0372, "step": 4125 }, { "epoch": 4.72, "grad_norm": 3.5771965814390256, "learning_rate": 5.084088992884661e-06, "loss": 4.1532, "step": 4126 }, { "epoch": 4.72, "grad_norm": 3.4139027295565674, "learning_rate": 5.043449513643239e-06, "loss": 4.1858, "step": 4127 }, { "epoch": 4.72, "grad_norm": 3.6394965180709673, "learning_rate": 5.00297173503017e-06, "loss": 3.91, "step": 4128 }, { "epoch": 4.72, "grad_norm": 4.833828983780835, "learning_rate": 4.962655679236138e-06, "loss": 4.0769, "step": 4129 }, { "epoch": 4.72, "grad_norm": 2.8037701755015374, "learning_rate": 4.9225013683631296e-06, "loss": 3.9953, "step": 4130 }, { "epoch": 4.72, "grad_norm": 3.318653125697597, "learning_rate": 4.8825088244245025e-06, "loss": 4.1853, "step": 4131 }, { "epoch": 4.72, "grad_norm": 3.845439781261716, "learning_rate": 4.842678069344952e-06, "loss": 4.0148, "step": 4132 }, { "epoch": 4.72, "grad_norm": 3.2586936237347817, "learning_rate": 4.803009124960344e-06, "loss": 4.0513, "step": 4133 }, { "epoch": 4.72, "grad_norm": 2.6450403752270324, "learning_rate": 4.76350201301795e-06, "loss": 4.1923, "step": 4134 }, { "epoch": 4.73, "grad_norm": 3.7118694120438356, "learning_rate": 4.724156755176311e-06, "loss": 4.1852, "step": 4135 }, { "epoch": 4.73, "grad_norm": 4.102684565829334, "learning_rate": 4.684973373005274e-06, "loss": 4.1537, "step": 4136 }, { "epoch": 4.73, "grad_norm": 3.735714042891191, "learning_rate": 4.6459518879857885e-06, "loss": 3.93, "step": 4137 }, { "epoch": 4.73, "grad_norm": 4.78599579963367, "learning_rate": 4.6070923215102085e-06, "loss": 3.9531, "step": 4138 }, { "epoch": 4.73, "grad_norm": 4.676409226759489, "learning_rate": 4.568394694882094e-06, "loss": 4.0574, "step": 4139 }, { "epoch": 4.73, "grad_norm": 6.173571341935304, "learning_rate": 4.529859029316174e-06, "loss": 4.1035, "step": 4140 }, { "epoch": 4.73, "grad_norm": 4.590076334845145, "learning_rate": 4.491485345938417e-06, "loss": 4.0306, "step": 4141 }, { "epoch": 4.73, "grad_norm": 3.4769994206731942, "learning_rate": 4.453273665785928e-06, "loss": 3.8919, "step": 4142 }, { "epoch": 4.73, "grad_norm": 2.429257278084257, "learning_rate": 4.41522400980715e-06, "loss": 3.8789, "step": 4143 }, { "epoch": 4.74, "grad_norm": 3.084300406476649, "learning_rate": 4.377336398861564e-06, "loss": 4.136, "step": 4144 }, { "epoch": 4.74, "grad_norm": 5.009343215140627, "learning_rate": 4.3396108537198215e-06, "loss": 4.0704, "step": 4145 }, { "epoch": 4.74, "grad_norm": 2.6738966447902937, "learning_rate": 4.302047395063813e-06, "loss": 4.0965, "step": 4146 }, { "epoch": 4.74, "grad_norm": 3.197313815231188, "learning_rate": 4.264646043486531e-06, "loss": 4.0851, "step": 4147 }, { "epoch": 4.74, "grad_norm": 3.318072290847935, "learning_rate": 4.22740681949204e-06, "loss": 3.9411, "step": 4148 }, { "epoch": 4.74, "grad_norm": 3.322493926119286, "learning_rate": 4.190329743495613e-06, "loss": 4.0534, "step": 4149 }, { "epoch": 4.74, "grad_norm": 3.443883311067105, "learning_rate": 4.153414835823521e-06, "loss": 3.9464, "step": 4150 }, { "epoch": 4.74, "grad_norm": 6.15298458701985, "learning_rate": 4.116662116713276e-06, "loss": 4.2068, "step": 4151 }, { "epoch": 4.75, "grad_norm": 3.180087174152525, "learning_rate": 4.080071606313362e-06, "loss": 3.9826, 
"step": 4152 }, { "epoch": 4.75, "grad_norm": 2.7083159766677936, "learning_rate": 4.043643324683366e-06, "loss": 4.0189, "step": 4153 }, { "epoch": 4.75, "grad_norm": 5.406095752350838, "learning_rate": 4.007377291793945e-06, "loss": 4.121, "step": 4154 }, { "epoch": 4.75, "grad_norm": 6.365366498001963, "learning_rate": 3.9712735275268306e-06, "loss": 4.0305, "step": 4155 }, { "epoch": 4.75, "grad_norm": 6.4125106864791865, "learning_rate": 3.9353320516747555e-06, "loss": 4.0175, "step": 4156 }, { "epoch": 4.75, "grad_norm": 3.5904505361391523, "learning_rate": 3.8995528839415595e-06, "loss": 3.9473, "step": 4157 }, { "epoch": 4.75, "grad_norm": 4.227464933977833, "learning_rate": 3.8639360439419175e-06, "loss": 3.9673, "step": 4158 }, { "epoch": 4.75, "grad_norm": 5.1514159128125385, "learning_rate": 3.828481551201745e-06, "loss": 4.2055, "step": 4159 }, { "epoch": 4.75, "grad_norm": 5.956088992937854, "learning_rate": 3.7931894251577942e-06, "loss": 4.0795, "step": 4160 }, { "epoch": 4.76, "grad_norm": 3.2632840821980027, "learning_rate": 3.758059685157888e-06, "loss": 3.867, "step": 4161 }, { "epoch": 4.76, "grad_norm": 4.00905065418712, "learning_rate": 3.723092350460788e-06, "loss": 4.0146, "step": 4162 }, { "epoch": 4.76, "grad_norm": 4.147756156578193, "learning_rate": 3.6882874402362596e-06, "loss": 3.9144, "step": 4163 }, { "epoch": 4.76, "grad_norm": 2.4859674266689074, "learning_rate": 3.6536449735649397e-06, "loss": 4.2, "step": 4164 }, { "epoch": 4.76, "grad_norm": 6.487107352157862, "learning_rate": 3.6191649694385038e-06, "loss": 4.0864, "step": 4165 }, { "epoch": 4.76, "grad_norm": 3.9734564660950773, "learning_rate": 3.5848474467595644e-06, "loss": 4.0906, "step": 4166 }, { "epoch": 4.76, "grad_norm": 4.854503198710445, "learning_rate": 3.5506924243415726e-06, "loss": 4.1284, "step": 4167 }, { "epoch": 4.76, "grad_norm": 3.5918630458342444, "learning_rate": 3.5166999209089496e-06, "loss": 3.9251, "step": 4168 }, { "epoch": 4.76, "grad_norm": 4.556655383855815, "learning_rate": 3.482869955096956e-06, "loss": 4.0307, "step": 4169 }, { "epoch": 4.77, "grad_norm": 4.412540688597168, "learning_rate": 3.4492025454519566e-06, "loss": 4.0176, "step": 4170 }, { "epoch": 4.77, "grad_norm": 11.507594320247751, "learning_rate": 3.41569771043092e-06, "loss": 4.0502, "step": 4171 }, { "epoch": 4.77, "grad_norm": 6.203366523882198, "learning_rate": 3.3823554684018204e-06, "loss": 4.1232, "step": 4172 }, { "epoch": 4.77, "grad_norm": 2.4346679836100935, "learning_rate": 3.3491758376435697e-06, "loss": 4.1323, "step": 4173 }, { "epoch": 4.77, "grad_norm": 2.689411964647328, "learning_rate": 3.3161588363457837e-06, "loss": 4.2082, "step": 4174 }, { "epoch": 4.77, "grad_norm": 4.598436617239837, "learning_rate": 3.283304482609017e-06, "loss": 4.0, "step": 4175 }, { "epoch": 4.77, "grad_norm": 2.1490909066566743, "learning_rate": 3.250612794444629e-06, "loss": 4.2136, "step": 4176 }, { "epoch": 4.77, "grad_norm": 2.7089804031300773, "learning_rate": 3.2180837897747837e-06, "loss": 4.1596, "step": 4177 }, { "epoch": 4.77, "grad_norm": 5.468794429537608, "learning_rate": 3.1857174864325483e-06, "loss": 4.104, "step": 4178 }, { "epoch": 4.78, "grad_norm": 2.2817485068869248, "learning_rate": 3.153513902161664e-06, "loss": 4.069, "step": 4179 }, { "epoch": 4.78, "grad_norm": 4.155132628270886, "learning_rate": 3.121473054616741e-06, "loss": 4.1985, "step": 4180 }, { "epoch": 4.78, "grad_norm": 4.115890093220942, "learning_rate": 3.0895949613631953e-06, "loss": 4.1008, "step": 4181 }, { "epoch": 
4.78, "grad_norm": 3.511188157229788, "learning_rate": 3.057879639877148e-06, "loss": 4.184, "step": 4182 }, { "epoch": 4.78, "grad_norm": 4.294932916599916, "learning_rate": 3.0263271075455565e-06, "loss": 3.864, "step": 4183 }, { "epoch": 4.78, "grad_norm": 2.4660159509038078, "learning_rate": 2.994937381666085e-06, "loss": 4.1631, "step": 4184 }, { "epoch": 4.78, "grad_norm": 5.705390488624052, "learning_rate": 2.963710479447168e-06, "loss": 4.2812, "step": 4185 }, { "epoch": 4.78, "grad_norm": 5.203760861932563, "learning_rate": 2.9326464180079444e-06, "loss": 3.8155, "step": 4186 }, { "epoch": 4.79, "grad_norm": 4.339879197030374, "learning_rate": 2.9017452143783595e-06, "loss": 3.9618, "step": 4187 }, { "epoch": 4.79, "grad_norm": 2.8428504898493556, "learning_rate": 2.8710068854989943e-06, "loss": 3.8488, "step": 4188 }, { "epoch": 4.79, "grad_norm": 5.223396823981209, "learning_rate": 2.8404314482211367e-06, "loss": 3.993, "step": 4189 }, { "epoch": 4.79, "grad_norm": 2.985357396010778, "learning_rate": 2.8100189193068444e-06, "loss": 4.3529, "step": 4190 }, { "epoch": 4.79, "grad_norm": 4.508292793179489, "learning_rate": 2.779769315428848e-06, "loss": 4.1144, "step": 4191 }, { "epoch": 4.79, "grad_norm": 3.5488049162484185, "learning_rate": 2.7496826531704817e-06, "loss": 4.1042, "step": 4192 }, { "epoch": 4.79, "grad_norm": 3.538211888417464, "learning_rate": 2.7197589490258187e-06, "loss": 4.1615, "step": 4193 }, { "epoch": 4.79, "grad_norm": 4.560958382284151, "learning_rate": 2.689998219399636e-06, "loss": 4.1783, "step": 4194 }, { "epoch": 4.79, "grad_norm": 3.4849233319067023, "learning_rate": 2.6604004806072833e-06, "loss": 4.2348, "step": 4195 }, { "epoch": 4.8, "grad_norm": 4.248879689872377, "learning_rate": 2.63096574887478e-06, "loss": 3.9831, "step": 4196 }, { "epoch": 4.8, "grad_norm": 2.7331674647925515, "learning_rate": 2.60169404033882e-06, "loss": 4.0615, "step": 4197 }, { "epoch": 4.8, "grad_norm": 4.865499172092791, "learning_rate": 2.5725853710466314e-06, "loss": 4.0719, "step": 4198 }, { "epoch": 4.8, "grad_norm": 3.0325299751515638, "learning_rate": 2.543639756956184e-06, "loss": 4.1211, "step": 4199 }, { "epoch": 4.8, "grad_norm": 2.330675982887287, "learning_rate": 2.5148572139359836e-06, "loss": 3.9945, "step": 4200 }, { "epoch": 4.8, "eval_blimp_filtered_avg": 0.5231343283582089, "eval_blimp_filtered_std": 0.005716553515323113, "step": 4200 }, { "epoch": 4.8, "eval_blimp_supplement_avg": 0.46120689655172414, "eval_blimp_supplement_std": 0.022616394405843454, "step": 4200 }, { "epoch": 4.8, "eval_vqa_filtered_avg": 0.36, "eval_vqa_filtered_std": 0.04824181513244218, "step": 4200 }, { "epoch": 4.8, "eval_winoground_filtered_avg": 0.51, "eval_winoground_filtered_std": 0.05024183937956912, "step": 4200 }, { "epoch": 4.8, "grad_norm": 3.7547579273425833, "learning_rate": 2.4862377577651415e-06, "loss": 4.0467, "step": 4201 }, { "epoch": 4.8, "grad_norm": 4.07934525725909, "learning_rate": 2.457781404133374e-06, "loss": 4.0441, "step": 4202 }, { "epoch": 4.8, "grad_norm": 4.051522547069704, "learning_rate": 2.429488168641003e-06, "loss": 3.9748, "step": 4203 }, { "epoch": 4.8, "grad_norm": 7.472598926045134, "learning_rate": 2.4013580667988865e-06, "loss": 4.0727, "step": 4204 }, { "epoch": 4.81, "grad_norm": 4.010529503372069, "learning_rate": 2.3733911140284554e-06, "loss": 4.1224, "step": 4205 }, { "epoch": 4.81, "grad_norm": 4.005729006674253, "learning_rate": 2.345587325661713e-06, "loss": 4.0498, "step": 4206 }, { "epoch": 4.81, "grad_norm": 
2.8121572135451705, "learning_rate": 2.3179467169411992e-06, "loss": 4.2631, "step": 4207 }, { "epoch": 4.81, "grad_norm": 5.087392132720708, "learning_rate": 2.290469303020026e-06, "loss": 3.9391, "step": 4208 }, { "epoch": 4.81, "grad_norm": 2.91428516227794, "learning_rate": 2.263155098961844e-06, "loss": 4.1292, "step": 4209 }, { "epoch": 4.81, "grad_norm": 5.581014378424899, "learning_rate": 2.236004119740742e-06, "loss": 4.0435, "step": 4210 }, { "epoch": 4.81, "grad_norm": 3.348495432086857, "learning_rate": 2.209016380241413e-06, "loss": 4.1711, "step": 4211 }, { "epoch": 4.81, "grad_norm": 3.611437043206666, "learning_rate": 2.1821918952590887e-06, "loss": 3.9138, "step": 4212 }, { "epoch": 4.81, "grad_norm": 5.9296352876818235, "learning_rate": 2.1555306794993733e-06, "loss": 4.2207, "step": 4213 }, { "epoch": 4.82, "grad_norm": 3.348326475461506, "learning_rate": 2.129032747578474e-06, "loss": 4.2114, "step": 4214 }, { "epoch": 4.82, "grad_norm": 3.4671733132709837, "learning_rate": 2.102698114023005e-06, "loss": 4.1773, "step": 4215 }, { "epoch": 4.82, "grad_norm": 4.400638138211643, "learning_rate": 2.07652679327015e-06, "loss": 4.0814, "step": 4216 }, { "epoch": 4.82, "grad_norm": 5.513436027559663, "learning_rate": 2.0505187996675e-06, "loss": 4.0981, "step": 4217 }, { "epoch": 4.82, "grad_norm": 6.272452406993846, "learning_rate": 2.0246741474731155e-06, "loss": 4.1431, "step": 4218 }, { "epoch": 4.82, "grad_norm": 4.465939717062978, "learning_rate": 1.9989928508554964e-06, "loss": 4.1365, "step": 4219 }, { "epoch": 4.82, "grad_norm": 2.4813228046095595, "learning_rate": 1.9734749238936143e-06, "loss": 4.0744, "step": 4220 }, { "epoch": 4.82, "grad_norm": 4.240061597998089, "learning_rate": 1.9481203805768784e-06, "loss": 3.9837, "step": 4221 }, { "epoch": 4.83, "grad_norm": 5.433178681054289, "learning_rate": 1.9229292348050707e-06, "loss": 4.257, "step": 4222 }, { "epoch": 4.83, "grad_norm": 4.488418544816571, "learning_rate": 1.89790150038851e-06, "loss": 3.9483, "step": 4223 }, { "epoch": 4.83, "grad_norm": 3.222550174555159, "learning_rate": 1.8730371910477881e-06, "loss": 4.1021, "step": 4224 }, { "epoch": 4.83, "grad_norm": 3.0709484894187664, "learning_rate": 1.8483363204140678e-06, "loss": 4.0688, "step": 4225 }, { "epoch": 4.83, "grad_norm": 3.6248980556689094, "learning_rate": 1.8237989020287835e-06, "loss": 3.8968, "step": 4226 }, { "epoch": 4.83, "grad_norm": 5.8921432998410355, "learning_rate": 1.799424949343742e-06, "loss": 3.9676, "step": 4227 }, { "epoch": 4.83, "grad_norm": 3.4399832279655778, "learning_rate": 1.7752144757212872e-06, "loss": 4.1346, "step": 4228 }, { "epoch": 4.83, "grad_norm": 3.4524870727730548, "learning_rate": 1.7511674944340026e-06, "loss": 4.0019, "step": 4229 }, { "epoch": 4.83, "grad_norm": 3.320787145718268, "learning_rate": 1.7272840186648762e-06, "loss": 4.1001, "step": 4230 }, { "epoch": 4.84, "grad_norm": 7.611934753941828, "learning_rate": 1.7035640615072675e-06, "loss": 3.9197, "step": 4231 }, { "epoch": 4.84, "grad_norm": 3.5839858280563117, "learning_rate": 1.6800076359649417e-06, "loss": 3.9388, "step": 4232 }, { "epoch": 4.84, "grad_norm": 2.8072818012250047, "learning_rate": 1.656614754951935e-06, "loss": 4.1738, "step": 4233 }, { "epoch": 4.84, "grad_norm": 3.5634375543039423, "learning_rate": 1.6333854312926553e-06, "loss": 3.9566, "step": 4234 }, { "epoch": 4.84, "grad_norm": 3.525067469446047, "learning_rate": 1.6103196777218497e-06, "loss": 4.239, "step": 4235 }, { "epoch": 4.84, "grad_norm": 4.225297022597572, 
"learning_rate": 1.587417506884603e-06, "loss": 4.169, "step": 4236 }, { "epoch": 4.84, "grad_norm": 2.338187060397291, "learning_rate": 1.5646789313362718e-06, "loss": 3.9228, "step": 4237 }, { "epoch": 4.84, "grad_norm": 4.444444305731669, "learning_rate": 1.542103963542618e-06, "loss": 4.0758, "step": 4238 }, { "epoch": 4.84, "grad_norm": 4.219639453849816, "learning_rate": 1.5196926158796086e-06, "loss": 4.1944, "step": 4239 }, { "epoch": 4.85, "grad_norm": 3.8808928755468837, "learning_rate": 1.4974449006335488e-06, "loss": 3.9017, "step": 4240 }, { "epoch": 4.85, "grad_norm": 3.6848037384655767, "learning_rate": 1.4753608300011154e-06, "loss": 4.0909, "step": 4241 }, { "epoch": 4.85, "grad_norm": 3.6720856821322267, "learning_rate": 1.4534404160891578e-06, "loss": 4.1376, "step": 4242 }, { "epoch": 4.85, "grad_norm": 3.2732453471440524, "learning_rate": 1.4316836709148293e-06, "loss": 4.1344, "step": 4243 }, { "epoch": 4.85, "grad_norm": 2.9077824328001842, "learning_rate": 1.4100906064056228e-06, "loss": 4.0221, "step": 4244 }, { "epoch": 4.85, "grad_norm": 3.5855252111728406, "learning_rate": 1.3886612343992353e-06, "loss": 4.1997, "step": 4245 }, { "epoch": 4.85, "grad_norm": 7.88933377398823, "learning_rate": 1.3673955666436697e-06, "loss": 3.9867, "step": 4246 }, { "epoch": 4.85, "grad_norm": 3.3340897741680715, "learning_rate": 1.3462936147971004e-06, "loss": 4.0624, "step": 4247 }, { "epoch": 4.85, "grad_norm": 3.9989394213696885, "learning_rate": 1.3253553904280733e-06, "loss": 4.2275, "step": 4248 }, { "epoch": 4.86, "grad_norm": 2.9809093475882076, "learning_rate": 1.3045809050152733e-06, "loss": 4.0985, "step": 4249 }, { "epoch": 4.86, "grad_norm": 2.8112113409700425, "learning_rate": 1.2839701699476569e-06, "loss": 3.9454, "step": 4250 }, { "epoch": 4.86, "grad_norm": 7.13902909286275, "learning_rate": 1.2635231965244186e-06, "loss": 3.9962, "step": 4251 }, { "epoch": 4.86, "grad_norm": 2.2712482703131482, "learning_rate": 1.2432399959549589e-06, "loss": 3.9627, "step": 4252 }, { "epoch": 4.86, "grad_norm": 2.2415012708509496, "learning_rate": 1.2231205793588828e-06, "loss": 4.2479, "step": 4253 }, { "epoch": 4.86, "grad_norm": 2.3796691939702925, "learning_rate": 1.2031649577660674e-06, "loss": 4.2083, "step": 4254 }, { "epoch": 4.86, "grad_norm": 4.100473524706443, "learning_rate": 1.1833731421165616e-06, "loss": 4.0673, "step": 4255 }, { "epoch": 4.86, "grad_norm": 6.9469106066208965, "learning_rate": 1.1637451432605528e-06, "loss": 4.1317, "step": 4256 }, { "epoch": 4.87, "grad_norm": 3.5245754122375295, "learning_rate": 1.1442809719585e-06, "loss": 3.918, "step": 4257 }, { "epoch": 4.87, "grad_norm": 8.20987723325389, "learning_rate": 1.1249806388810344e-06, "loss": 4.1029, "step": 4258 }, { "epoch": 4.87, "grad_norm": 2.704097506477064, "learning_rate": 1.1058441546089258e-06, "loss": 4.253, "step": 4259 }, { "epoch": 4.87, "grad_norm": 3.0459610104815202, "learning_rate": 1.086871529633182e-06, "loss": 3.9569, "step": 4260 }, { "epoch": 4.87, "grad_norm": 2.4722202042835764, "learning_rate": 1.0680627743549164e-06, "loss": 3.9733, "step": 4261 }, { "epoch": 4.87, "grad_norm": 5.232631801695797, "learning_rate": 1.0494178990854473e-06, "loss": 3.8992, "step": 4262 }, { "epoch": 4.87, "grad_norm": 3.644816440535391, "learning_rate": 1.030936914046232e-06, "loss": 4.1171, "step": 4263 }, { "epoch": 4.87, "grad_norm": 2.6119471961733955, "learning_rate": 1.0126198293688992e-06, "loss": 4.1262, "step": 4264 }, { "epoch": 4.87, "grad_norm": 5.622430384165323, 
"learning_rate": 9.944666550952163e-07, "loss": 4.0512, "step": 4265 }, { "epoch": 4.88, "grad_norm": 7.430417663698003, "learning_rate": 9.764774011770559e-07, "loss": 4.0526, "step": 4266 }, { "epoch": 4.88, "grad_norm": 5.018548846294173, "learning_rate": 9.586520774765294e-07, "loss": 3.9743, "step": 4267 }, { "epoch": 4.88, "grad_norm": 5.763522486441058, "learning_rate": 9.409906937657197e-07, "loss": 3.939, "step": 4268 }, { "epoch": 4.88, "grad_norm": 2.8102831014498286, "learning_rate": 9.234932597270151e-07, "loss": 4.1408, "step": 4269 }, { "epoch": 4.88, "grad_norm": 3.847536595128641, "learning_rate": 9.061597849527425e-07, "loss": 4.0972, "step": 4270 }, { "epoch": 4.88, "grad_norm": 3.2299951960972715, "learning_rate": 8.889902789454673e-07, "loss": 4.0997, "step": 4271 }, { "epoch": 4.88, "grad_norm": 3.011594287761476, "learning_rate": 8.719847511178935e-07, "loss": 3.9894, "step": 4272 }, { "epoch": 4.88, "grad_norm": 4.473138311084413, "learning_rate": 8.551432107926637e-07, "loss": 3.9967, "step": 4273 }, { "epoch": 4.88, "grad_norm": 4.026569643627682, "learning_rate": 8.384656672026924e-07, "loss": 4.0304, "step": 4274 }, { "epoch": 4.89, "grad_norm": 3.0303858704766107, "learning_rate": 8.21952129490866e-07, "loss": 4.2153, "step": 4275 }, { "epoch": 4.89, "grad_norm": 3.301139705225723, "learning_rate": 8.056026067102761e-07, "loss": 4.2036, "step": 4276 }, { "epoch": 4.89, "grad_norm": 10.711978451032289, "learning_rate": 7.89417107823953e-07, "loss": 4.1081, "step": 4277 }, { "epoch": 4.89, "grad_norm": 6.137029322210865, "learning_rate": 7.733956417051323e-07, "loss": 4.0727, "step": 4278 }, { "epoch": 4.89, "grad_norm": 4.837632274165734, "learning_rate": 7.57538217137088e-07, "loss": 4.1043, "step": 4279 }, { "epoch": 4.89, "grad_norm": 5.114966585297645, "learning_rate": 7.41844842813133e-07, "loss": 4.1703, "step": 4280 }, { "epoch": 4.89, "grad_norm": 3.290231766103547, "learning_rate": 7.263155273366849e-07, "loss": 4.1182, "step": 4281 }, { "epoch": 4.89, "grad_norm": 3.6300041077518688, "learning_rate": 7.109502792211674e-07, "loss": 4.0192, "step": 4282 }, { "epoch": 4.89, "grad_norm": 3.7960047694475705, "learning_rate": 6.957491068901089e-07, "loss": 4.1291, "step": 4283 }, { "epoch": 4.9, "grad_norm": 4.578634840949222, "learning_rate": 6.807120186770765e-07, "loss": 4.1268, "step": 4284 }, { "epoch": 4.9, "grad_norm": 2.604940314489177, "learning_rate": 6.65839022825676e-07, "loss": 3.9997, "step": 4285 }, { "epoch": 4.9, "grad_norm": 6.39501249214444, "learning_rate": 6.511301274895853e-07, "loss": 3.993, "step": 4286 }, { "epoch": 4.9, "grad_norm": 2.7532436713612354, "learning_rate": 6.365853407324206e-07, "loss": 4.1458, "step": 4287 }, { "epoch": 4.9, "grad_norm": 3.516494498617141, "learning_rate": 6.222046705280038e-07, "loss": 4.2123, "step": 4288 }, { "epoch": 4.9, "grad_norm": 4.365080384876002, "learning_rate": 6.07988124759995e-07, "loss": 4.0238, "step": 4289 }, { "epoch": 4.9, "grad_norm": 3.445366122923398, "learning_rate": 5.939357112222265e-07, "loss": 4.1211, "step": 4290 }, { "epoch": 4.9, "grad_norm": 4.123339418003066, "learning_rate": 5.800474376184361e-07, "loss": 4.031, "step": 4291 }, { "epoch": 4.91, "grad_norm": 4.498494920250651, "learning_rate": 5.663233115624999e-07, "loss": 4.1058, "step": 4292 }, { "epoch": 4.91, "grad_norm": 4.470794214364937, "learning_rate": 5.527633405781662e-07, "loss": 4.001, "step": 4293 }, { "epoch": 4.91, "grad_norm": 3.458606025466353, "learning_rate": 5.393675320993219e-07, "loss": 4.1887, 
"step": 4294 }, { "epoch": 4.91, "grad_norm": 4.931505254193795, "learning_rate": 5.261358934697591e-07, "loss": 4.1386, "step": 4295 }, { "epoch": 4.91, "grad_norm": 5.090000354377346, "learning_rate": 5.130684319433087e-07, "loss": 4.3127, "step": 4296 }, { "epoch": 4.91, "grad_norm": 4.323501093793252, "learning_rate": 5.001651546838071e-07, "loss": 4.145, "step": 4297 }, { "epoch": 4.91, "grad_norm": 4.76244980723696, "learning_rate": 4.874260687650622e-07, "loss": 4.1077, "step": 4298 }, { "epoch": 4.91, "grad_norm": 3.529134593421125, "learning_rate": 4.748511811708877e-07, "loss": 4.0039, "step": 4299 }, { "epoch": 4.91, "grad_norm": 3.4278601172633723, "learning_rate": 4.624404987950692e-07, "loss": 4.1369, "step": 4300 }, { "epoch": 4.91, "eval_blimp_filtered_avg": 0.5222388059701493, "eval_blimp_filtered_std": 0.0057290963420511135, "step": 4300 }, { "epoch": 4.91, "eval_blimp_supplement_avg": 0.46120689655172414, "eval_blimp_supplement_std": 0.022554741357676152, "step": 4300 }, { "epoch": 4.91, "eval_vqa_filtered_avg": 0.36, "eval_vqa_filtered_std": 0.04824181513244218, "step": 4300 }, { "epoch": 4.91, "eval_winoground_filtered_avg": 0.53, "eval_winoground_filtered_std": 0.0501613558046592, "step": 4300 }, { "epoch": 4.92, "grad_norm": 3.9656988221212077, "learning_rate": 4.501940284413308e-07, "loss": 3.9099, "step": 4301 }, { "epoch": 4.92, "grad_norm": 6.409908685271376, "learning_rate": 4.381117768234688e-07, "loss": 3.8438, "step": 4302 }, { "epoch": 4.92, "grad_norm": 11.677065277470783, "learning_rate": 4.2619375056518467e-07, "loss": 4.0962, "step": 4303 }, { "epoch": 4.92, "grad_norm": 2.8510011030762916, "learning_rate": 4.1443995620015214e-07, "loss": 4.0841, "step": 4304 }, { "epoch": 4.92, "grad_norm": 3.9867019766809775, "learning_rate": 4.0285040017198343e-07, "loss": 4.0376, "step": 4305 }, { "epoch": 4.92, "grad_norm": 6.310020011240259, "learning_rate": 3.9142508883436284e-07, "loss": 4.121, "step": 4306 }, { "epoch": 4.92, "grad_norm": 3.111450967658074, "learning_rate": 3.8016402845078007e-07, "loss": 4.1149, "step": 4307 }, { "epoch": 4.92, "grad_norm": 3.7600243324669895, "learning_rate": 3.690672251947968e-07, "loss": 4.0199, "step": 4308 }, { "epoch": 4.92, "grad_norm": 2.573730214908648, "learning_rate": 3.581346851498801e-07, "loss": 4.1373, "step": 4309 }, { "epoch": 4.93, "grad_norm": 2.977436646327557, "learning_rate": 3.47366414309469e-07, "loss": 4.1751, "step": 4310 }, { "epoch": 4.93, "grad_norm": 2.270514061523291, "learning_rate": 3.367624185768747e-07, "loss": 3.8962, "step": 4311 }, { "epoch": 4.93, "grad_norm": 4.262224850159704, "learning_rate": 3.2632270376548033e-07, "loss": 4.0127, "step": 4312 }, { "epoch": 4.93, "grad_norm": 3.186231074858047, "learning_rate": 3.160472755984411e-07, "loss": 4.0488, "step": 4313 }, { "epoch": 4.93, "grad_norm": 6.113654909726644, "learning_rate": 3.0593613970901763e-07, "loss": 3.9435, "step": 4314 }, { "epoch": 4.93, "grad_norm": 3.973467150168338, "learning_rate": 2.959893016402759e-07, "loss": 4.1979, "step": 4315 }, { "epoch": 4.93, "grad_norm": 3.01520381173676, "learning_rate": 2.862067668452872e-07, "loss": 4.2707, "step": 4316 }, { "epoch": 4.93, "grad_norm": 3.7118286004966703, "learning_rate": 2.765885406869617e-07, "loss": 4.0569, "step": 4317 }, { "epoch": 4.93, "grad_norm": 4.307557593584464, "learning_rate": 2.6713462843824806e-07, "loss": 4.0975, "step": 4318 }, { "epoch": 4.94, "grad_norm": 5.424494644087411, "learning_rate": 2.578450352819339e-07, "loss": 4.3158, "step": 4319 }, { 
"epoch": 4.94, "grad_norm": 4.8734211009708694, "learning_rate": 2.487197663107121e-07, "loss": 4.0962, "step": 4320 }, { "epoch": 4.94, "grad_norm": 2.7765695327363455, "learning_rate": 2.39758826527281e-07, "loss": 4.3301, "step": 4321 }, { "epoch": 4.94, "grad_norm": 3.8955758623928287, "learning_rate": 2.3096222084417747e-07, "loss": 4.1206, "step": 4322 }, { "epoch": 4.94, "grad_norm": 5.4124974380313695, "learning_rate": 2.2232995408384414e-07, "loss": 3.8779, "step": 4323 }, { "epoch": 4.94, "grad_norm": 2.110442629804503, "learning_rate": 2.1386203097866207e-07, "loss": 3.9958, "step": 4324 }, { "epoch": 4.94, "grad_norm": 3.6227525322338123, "learning_rate": 2.055584561708845e-07, "loss": 4.1369, "step": 4325 }, { "epoch": 4.94, "grad_norm": 3.1139243980873474, "learning_rate": 1.9741923421270345e-07, "loss": 4.0377, "step": 4326 }, { "epoch": 4.95, "grad_norm": 6.272368813218213, "learning_rate": 1.8944436956621622e-07, "loss": 4.0219, "step": 4327 }, { "epoch": 4.95, "grad_norm": 3.1277456396298144, "learning_rate": 1.8163386660332567e-07, "loss": 3.955, "step": 4328 }, { "epoch": 4.95, "grad_norm": 3.081851706869889, "learning_rate": 1.7398772960593998e-07, "loss": 4.0061, "step": 4329 }, { "epoch": 4.95, "grad_norm": 3.64514640094717, "learning_rate": 1.6650596276580608e-07, "loss": 4.0648, "step": 4330 }, { "epoch": 4.95, "grad_norm": 4.525689201332529, "learning_rate": 1.5918857018457633e-07, "loss": 3.9569, "step": 4331 }, { "epoch": 4.95, "grad_norm": 6.700132818395476, "learning_rate": 1.5203555587374183e-07, "loss": 3.9206, "step": 4332 }, { "epoch": 4.95, "grad_norm": 3.2099007417713357, "learning_rate": 1.4504692375476578e-07, "loss": 4.2668, "step": 4333 }, { "epoch": 4.95, "grad_norm": 4.540505640331635, "learning_rate": 1.382226776589168e-07, "loss": 3.9683, "step": 4334 }, { "epoch": 4.95, "grad_norm": 7.941807372233224, "learning_rate": 1.3156282132736895e-07, "loss": 4.1645, "step": 4335 }, { "epoch": 4.96, "grad_norm": 5.7960715694419935, "learning_rate": 1.2506735841120164e-07, "loss": 4.1258, "step": 4336 }, { "epoch": 4.96, "grad_norm": 3.002632555038053, "learning_rate": 1.1873629247129978e-07, "loss": 4.0374, "step": 4337 }, { "epoch": 4.96, "grad_norm": 2.48403033346022, "learning_rate": 1.1256962697852035e-07, "loss": 4.0181, "step": 4338 }, { "epoch": 4.96, "grad_norm": 3.9689927917512273, "learning_rate": 1.0656736531355903e-07, "loss": 4.0765, "step": 4339 }, { "epoch": 4.96, "grad_norm": 7.784373544895908, "learning_rate": 1.0072951076688374e-07, "loss": 3.912, "step": 4340 }, { "epoch": 4.96, "grad_norm": 2.7297961773294372, "learning_rate": 9.5056066539001e-08, "loss": 4.0069, "step": 4341 }, { "epoch": 4.96, "grad_norm": 3.6192125267970545, "learning_rate": 8.95470357401562e-08, "loss": 4.0035, "step": 4342 }, { "epoch": 4.96, "grad_norm": 6.553771956595043, "learning_rate": 8.420242139050016e-08, "loss": 4.1604, "step": 4343 }, { "epoch": 4.96, "grad_norm": 2.3115108875947934, "learning_rate": 7.902222642005574e-08, "loss": 4.0727, "step": 4344 }, { "epoch": 4.97, "grad_norm": 3.4405287209059794, "learning_rate": 7.400645366871794e-08, "loss": 4.082, "step": 4345 }, { "epoch": 4.97, "grad_norm": 3.097730092068558, "learning_rate": 6.915510588618723e-08, "loss": 4.3861, "step": 4346 }, { "epoch": 4.97, "grad_norm": 3.382189133781147, "learning_rate": 6.446818573210277e-08, "loss": 4.1442, "step": 4347 }, { "epoch": 4.97, "grad_norm": 3.5959033960081532, "learning_rate": 5.994569577590924e-08, "loss": 4.0968, "step": 4348 }, { "epoch": 4.97, 
"grad_norm": 4.6422708275639115, "learning_rate": 5.558763849689007e-08, "loss": 3.939, "step": 4349 }, { "epoch": 4.97, "grad_norm": 3.21636597272728, "learning_rate": 5.1394016284234117e-08, "loss": 4.0155, "step": 4350 }, { "epoch": 4.97, "grad_norm": 4.011094645916989, "learning_rate": 4.736483143696901e-08, "loss": 4.1768, "step": 4351 }, { "epoch": 4.97, "grad_norm": 3.5146615393447362, "learning_rate": 4.350008616399447e-08, "loss": 4.1232, "step": 4352 }, { "epoch": 4.97, "grad_norm": 3.586965568572597, "learning_rate": 3.979978258398242e-08, "loss": 3.8865, "step": 4353 }, { "epoch": 4.98, "grad_norm": 3.066656773249175, "learning_rate": 3.626392272551015e-08, "loss": 4.0584, "step": 4354 }, { "epoch": 4.98, "grad_norm": 3.0692073805272875, "learning_rate": 3.2892508527027074e-08, "loss": 3.92, "step": 4355 }, { "epoch": 4.98, "grad_norm": 2.781943776469792, "learning_rate": 2.968554183682137e-08, "loss": 4.1927, "step": 4356 }, { "epoch": 4.98, "grad_norm": 2.9928594043431183, "learning_rate": 2.6643024412986713e-08, "loss": 3.9699, "step": 4357 }, { "epoch": 4.98, "grad_norm": 3.2123473659005617, "learning_rate": 2.3764957923455563e-08, "loss": 4.1257, "step": 4358 }, { "epoch": 4.98, "grad_norm": 2.361301331532532, "learning_rate": 2.1051343946099086e-08, "loss": 4.247, "step": 4359 }, { "epoch": 4.98, "grad_norm": 4.160195816468606, "learning_rate": 1.8502183968527318e-08, "loss": 4.1823, "step": 4360 }, { "epoch": 4.98, "grad_norm": 3.2862173563304613, "learning_rate": 1.6117479388255695e-08, "loss": 4.1217, "step": 4361 }, { "epoch": 4.99, "grad_norm": 8.051234267083998, "learning_rate": 1.3897231512638441e-08, "loss": 4.198, "step": 4362 }, { "epoch": 4.99, "grad_norm": 4.445218180405192, "learning_rate": 1.1841441558801957e-08, "loss": 4.1696, "step": 4363 }, { "epoch": 4.99, "grad_norm": 4.397289277421306, "learning_rate": 9.95011065381135e-09, "loss": 4.1526, "step": 4364 }, { "epoch": 4.99, "grad_norm": 2.766834149990971, "learning_rate": 8.22323983453721e-09, "loss": 4.1963, "step": 4365 }, { "epoch": 4.99, "grad_norm": 3.7251947638100575, "learning_rate": 6.660830047655608e-09, "loss": 4.0511, "step": 4366 }, { "epoch": 4.99, "grad_norm": 3.21397471458884, "learning_rate": 5.262882149748015e-09, "loss": 4.1718, "step": 4367 }, { "epoch": 4.99, "grad_norm": 3.1957737714726817, "learning_rate": 4.0293969071347744e-09, "loss": 3.9336, "step": 4368 }, { "epoch": 4.99, "grad_norm": 2.7131171639178047, "learning_rate": 2.960374996074932e-09, "loss": 4.1371, "step": 4369 }, { "epoch": 4.99, "grad_norm": 4.961320187940303, "learning_rate": 2.055817002599713e-09, "loss": 4.2498, "step": 4370 }, { "epoch": 5.0, "grad_norm": 3.3099955852103706, "learning_rate": 1.3157234226457426e-09, "loss": 4.2319, "step": 4371 }, { "epoch": 5.0, "grad_norm": 3.74023747011329, "learning_rate": 7.400946619218196e-10, "loss": 4.2399, "step": 4372 }, { "epoch": 5.0, "grad_norm": 2.528757091762162, "learning_rate": 3.289310359755326e-10, "loss": 4.0392, "step": 4373 }, { "epoch": 5.0, "grad_norm": 3.6277130145875223, "learning_rate": 8.22327702598713e-11, "loss": 4.0927, "step": 4374 }, { "epoch": 5.0, "grad_norm": 2.5685903470353098, "learning_rate": 0.0, "loss": 3.9198, "step": 4375 }, { "epoch": 5.0, "step": 4375, "total_flos": 75070389288960.0, "train_loss": 4.425508696310861, "train_runtime": 40327.6362, "train_samples_per_second": 13.872, "train_steps_per_second": 0.108 } ], "logging_steps": 1.0, "max_steps": 4375, "num_input_tokens_seen": 0, "num_train_epochs": 5, "save_steps": 500, 
"total_flos": 75070389288960.0, "train_batch_size": 32, "trial_name": null, "trial_params": null }