{ "best_metric": 0.3389103412628174, "best_model_checkpoint": "./w2v-bert-2.0-chichewa_34h/checkpoint-4000", "epoch": 44.946478873239435, "eval_steps": 1000, "global_step": 8000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.005633802816901409, "grad_norm": 23.37450408935547, "learning_rate": 3.0000000000000004e-09, "loss": 10.0931, "step": 1 }, { "epoch": 0.011267605633802818, "grad_norm": 23.055225372314453, "learning_rate": 6.000000000000001e-09, "loss": 9.7041, "step": 2 }, { "epoch": 0.016901408450704224, "grad_norm": 21.591968536376953, "learning_rate": 9e-09, "loss": 9.2937, "step": 3 }, { "epoch": 0.022535211267605635, "grad_norm": 19.100778579711914, "learning_rate": 1.2000000000000002e-08, "loss": 8.3783, "step": 4 }, { "epoch": 0.028169014084507043, "grad_norm": 20.580049514770508, "learning_rate": 1.5000000000000002e-08, "loss": 8.7382, "step": 5 }, { "epoch": 0.03380281690140845, "grad_norm": 19.053171157836914, "learning_rate": 1.8e-08, "loss": 8.1976, "step": 6 }, { "epoch": 0.03943661971830986, "grad_norm": 20.025707244873047, "learning_rate": 2.1e-08, "loss": 8.5869, "step": 7 }, { "epoch": 0.04507042253521127, "grad_norm": 19.60275650024414, "learning_rate": 2.4000000000000003e-08, "loss": 8.2925, "step": 8 }, { "epoch": 0.05070422535211268, "grad_norm": 19.109025955200195, "learning_rate": 2.7e-08, "loss": 8.3882, "step": 9 }, { "epoch": 0.056338028169014086, "grad_norm": 18.777395248413086, "learning_rate": 3.0000000000000004e-08, "loss": 8.0625, "step": 10 }, { "epoch": 0.061971830985915494, "grad_norm": 19.636445999145508, "learning_rate": 3.3000000000000004e-08, "loss": 8.4087, "step": 11 }, { "epoch": 0.0676056338028169, "grad_norm": 20.13718032836914, "learning_rate": 3.6e-08, "loss": 8.6273, "step": 12 }, { "epoch": 0.07323943661971831, "grad_norm": 19.168004989624023, "learning_rate": 3.9e-08, "loss": 8.1722, "step": 13 }, { "epoch": 0.07887323943661972, 
"grad_norm": 19.736047744750977, "learning_rate": 4.2e-08, "loss": 8.4533, "step": 14 }, { "epoch": 0.08450704225352113, "grad_norm": 18.998886108398438, "learning_rate": 4.5e-08, "loss": 8.145, "step": 15 }, { "epoch": 0.09014084507042254, "grad_norm": 19.929027557373047, "learning_rate": 4.8000000000000006e-08, "loss": 8.3833, "step": 16 }, { "epoch": 0.09577464788732394, "grad_norm": 18.18242645263672, "learning_rate": 5.1e-08, "loss": 7.8769, "step": 17 }, { "epoch": 0.10140845070422536, "grad_norm": 19.729808807373047, "learning_rate": 5.4e-08, "loss": 8.3034, "step": 18 }, { "epoch": 0.10704225352112676, "grad_norm": 19.35500144958496, "learning_rate": 5.7e-08, "loss": 8.1648, "step": 19 }, { "epoch": 0.11267605633802817, "grad_norm": 19.351375579833984, "learning_rate": 6.000000000000001e-08, "loss": 8.2531, "step": 20 }, { "epoch": 0.11830985915492957, "grad_norm": 20.13310432434082, "learning_rate": 6.3e-08, "loss": 8.4955, "step": 21 }, { "epoch": 0.12394366197183099, "grad_norm": 19.18810272216797, "learning_rate": 6.600000000000001e-08, "loss": 8.0951, "step": 22 }, { "epoch": 0.1295774647887324, "grad_norm": 19.997060775756836, "learning_rate": 6.9e-08, "loss": 8.3085, "step": 23 }, { "epoch": 0.1352112676056338, "grad_norm": 19.73467445373535, "learning_rate": 7.2e-08, "loss": 8.0012, "step": 24 }, { "epoch": 0.14084507042253522, "grad_norm": 20.199962615966797, "learning_rate": 7.500000000000001e-08, "loss": 8.1924, "step": 25 }, { "epoch": 0.14647887323943662, "grad_norm": 19.340503692626953, "learning_rate": 7.8e-08, "loss": 8.1655, "step": 26 }, { "epoch": 0.15211267605633802, "grad_norm": 18.63260269165039, "learning_rate": 8.100000000000001e-08, "loss": 7.8248, "step": 27 }, { "epoch": 0.15774647887323945, "grad_norm": 18.444799423217773, "learning_rate": 8.4e-08, "loss": 7.9568, "step": 28 }, { "epoch": 0.16338028169014085, "grad_norm": 19.628917694091797, "learning_rate": 8.7e-08, "loss": 7.9813, "step": 29 }, { "epoch": 0.16901408450704225, 
"grad_norm": 18.812955856323242, "learning_rate": 9e-08, "loss": 7.8513, "step": 30 }, { "epoch": 0.17464788732394365, "grad_norm": 19.53274154663086, "learning_rate": 9.3e-08, "loss": 8.0772, "step": 31 }, { "epoch": 0.18028169014084508, "grad_norm": 18.708633422851562, "learning_rate": 9.600000000000001e-08, "loss": 7.7278, "step": 32 }, { "epoch": 0.18591549295774648, "grad_norm": 18.192834854125977, "learning_rate": 9.9e-08, "loss": 7.7268, "step": 33 }, { "epoch": 0.19154929577464788, "grad_norm": 18.53217887878418, "learning_rate": 1.02e-07, "loss": 7.6227, "step": 34 }, { "epoch": 0.19718309859154928, "grad_norm": 18.989917755126953, "learning_rate": 1.05e-07, "loss": 7.7894, "step": 35 }, { "epoch": 0.2028169014084507, "grad_norm": 19.437061309814453, "learning_rate": 1.08e-07, "loss": 7.6945, "step": 36 }, { "epoch": 0.2084507042253521, "grad_norm": 18.471364974975586, "learning_rate": 1.11e-07, "loss": 7.524, "step": 37 }, { "epoch": 0.2140845070422535, "grad_norm": 19.86949920654297, "learning_rate": 1.14e-07, "loss": 7.8956, "step": 38 }, { "epoch": 0.21971830985915494, "grad_norm": 19.169902801513672, "learning_rate": 1.17e-07, "loss": 7.6071, "step": 39 }, { "epoch": 0.22535211267605634, "grad_norm": 18.673480987548828, "learning_rate": 1.2000000000000002e-07, "loss": 7.3223, "step": 40 }, { "epoch": 0.23098591549295774, "grad_norm": 18.413576126098633, "learning_rate": 1.23e-07, "loss": 7.2614, "step": 41 }, { "epoch": 0.23661971830985915, "grad_norm": 18.13873863220215, "learning_rate": 1.26e-07, "loss": 7.179, "step": 42 }, { "epoch": 0.24225352112676057, "grad_norm": 18.680736541748047, "learning_rate": 1.29e-07, "loss": 7.36, "step": 43 }, { "epoch": 0.24788732394366197, "grad_norm": 18.37202262878418, "learning_rate": 1.3200000000000002e-07, "loss": 7.1083, "step": 44 }, { "epoch": 0.2535211267605634, "grad_norm": 28.554738998413086, "learning_rate": 1.35e-07, "loss": 10.9576, "step": 45 }, { "epoch": 0.2591549295774648, "grad_norm": 
23.22928810119629, "learning_rate": 1.38e-07, "loss": 9.4035, "step": 46 }, { "epoch": 0.2647887323943662, "grad_norm": 22.669530868530273, "learning_rate": 1.41e-07, "loss": 9.0631, "step": 47 }, { "epoch": 0.2704225352112676, "grad_norm": 21.29054069519043, "learning_rate": 1.44e-07, "loss": 8.5767, "step": 48 }, { "epoch": 0.27605633802816903, "grad_norm": 21.267587661743164, "learning_rate": 1.47e-07, "loss": 8.5628, "step": 49 }, { "epoch": 0.28169014084507044, "grad_norm": 20.70184898376465, "learning_rate": 1.5000000000000002e-07, "loss": 8.4956, "step": 50 }, { "epoch": 0.28732394366197184, "grad_norm": 20.579097747802734, "learning_rate": 1.53e-07, "loss": 8.4322, "step": 51 }, { "epoch": 0.29295774647887324, "grad_norm": 20.76494026184082, "learning_rate": 1.56e-07, "loss": 8.3483, "step": 52 }, { "epoch": 0.29859154929577464, "grad_norm": 22.010337829589844, "learning_rate": 1.59e-07, "loss": 8.6267, "step": 53 }, { "epoch": 0.30422535211267604, "grad_norm": 20.0366268157959, "learning_rate": 1.6200000000000002e-07, "loss": 8.0702, "step": 54 }, { "epoch": 0.30985915492957744, "grad_norm": 20.99293327331543, "learning_rate": 1.6499999999999998e-07, "loss": 8.232, "step": 55 }, { "epoch": 0.3154929577464789, "grad_norm": 19.92287254333496, "learning_rate": 1.68e-07, "loss": 8.09, "step": 56 }, { "epoch": 0.3211267605633803, "grad_norm": 20.472942352294922, "learning_rate": 1.71e-07, "loss": 8.0906, "step": 57 }, { "epoch": 0.3267605633802817, "grad_norm": 21.892868041992188, "learning_rate": 1.74e-07, "loss": 8.4593, "step": 58 }, { "epoch": 0.3323943661971831, "grad_norm": 21.296226501464844, "learning_rate": 1.77e-07, "loss": 8.2587, "step": 59 }, { "epoch": 0.3380281690140845, "grad_norm": 21.45792579650879, "learning_rate": 1.8e-07, "loss": 8.2867, "step": 60 }, { "epoch": 0.3436619718309859, "grad_norm": 20.300464630126953, "learning_rate": 1.83e-07, "loss": 7.7559, "step": 61 }, { "epoch": 0.3492957746478873, "grad_norm": 21.698631286621094, 
"learning_rate": 1.86e-07, "loss": 8.199, "step": 62 }, { "epoch": 0.35492957746478876, "grad_norm": 19.86581039428711, "learning_rate": 1.89e-07, "loss": 7.6167, "step": 63 }, { "epoch": 0.36056338028169016, "grad_norm": 21.967016220092773, "learning_rate": 1.9200000000000003e-07, "loss": 8.1939, "step": 64 }, { "epoch": 0.36619718309859156, "grad_norm": 21.37307357788086, "learning_rate": 1.9499999999999999e-07, "loss": 8.1185, "step": 65 }, { "epoch": 0.37183098591549296, "grad_norm": 20.290283203125, "learning_rate": 1.98e-07, "loss": 7.8508, "step": 66 }, { "epoch": 0.37746478873239436, "grad_norm": 21.446462631225586, "learning_rate": 2.01e-07, "loss": 7.8273, "step": 67 }, { "epoch": 0.38309859154929576, "grad_norm": 21.343917846679688, "learning_rate": 2.04e-07, "loss": 7.8103, "step": 68 }, { "epoch": 0.38873239436619716, "grad_norm": 20.38936996459961, "learning_rate": 2.0700000000000001e-07, "loss": 7.5509, "step": 69 }, { "epoch": 0.39436619718309857, "grad_norm": 21.852344512939453, "learning_rate": 2.1e-07, "loss": 7.975, "step": 70 }, { "epoch": 0.4, "grad_norm": 21.029077529907227, "learning_rate": 2.1300000000000001e-07, "loss": 7.7329, "step": 71 }, { "epoch": 0.4056338028169014, "grad_norm": 21.277189254760742, "learning_rate": 2.16e-07, "loss": 7.697, "step": 72 }, { "epoch": 0.4112676056338028, "grad_norm": 20.803091049194336, "learning_rate": 2.1900000000000002e-07, "loss": 7.6135, "step": 73 }, { "epoch": 0.4169014084507042, "grad_norm": 19.920984268188477, "learning_rate": 2.22e-07, "loss": 7.3491, "step": 74 }, { "epoch": 0.4225352112676056, "grad_norm": 20.588159561157227, "learning_rate": 2.25e-07, "loss": 7.3246, "step": 75 }, { "epoch": 0.428169014084507, "grad_norm": 20.864524841308594, "learning_rate": 2.28e-07, "loss": 7.4887, "step": 76 }, { "epoch": 0.43380281690140843, "grad_norm": 20.144886016845703, "learning_rate": 2.3100000000000002e-07, "loss": 7.2445, "step": 77 }, { "epoch": 0.4394366197183099, "grad_norm": 
20.628171920776367, "learning_rate": 2.34e-07, "loss": 7.1465, "step": 78 }, { "epoch": 0.4450704225352113, "grad_norm": 21.245912551879883, "learning_rate": 2.3700000000000002e-07, "loss": 7.4386, "step": 79 }, { "epoch": 0.4507042253521127, "grad_norm": 22.11734390258789, "learning_rate": 2.4000000000000003e-07, "loss": 7.3555, "step": 80 }, { "epoch": 0.4563380281690141, "grad_norm": 21.49259376525879, "learning_rate": 2.43e-07, "loss": 7.2534, "step": 81 }, { "epoch": 0.4619718309859155, "grad_norm": 21.531265258789062, "learning_rate": 2.46e-07, "loss": 7.2902, "step": 82 }, { "epoch": 0.4676056338028169, "grad_norm": 23.177501678466797, "learning_rate": 2.49e-07, "loss": 7.5368, "step": 83 }, { "epoch": 0.4732394366197183, "grad_norm": 21.143033981323242, "learning_rate": 2.52e-07, "loss": 7.0782, "step": 84 }, { "epoch": 0.4788732394366197, "grad_norm": 21.302194595336914, "learning_rate": 2.5500000000000005e-07, "loss": 6.9088, "step": 85 }, { "epoch": 0.48450704225352115, "grad_norm": 20.92340660095215, "learning_rate": 2.58e-07, "loss": 6.8607, "step": 86 }, { "epoch": 0.49014084507042255, "grad_norm": 21.352828979492188, "learning_rate": 2.6099999999999997e-07, "loss": 6.8558, "step": 87 }, { "epoch": 0.49577464788732395, "grad_norm": 20.361351013183594, "learning_rate": 2.6400000000000003e-07, "loss": 6.574, "step": 88 }, { "epoch": 0.5014084507042254, "grad_norm": 32.090736389160156, "learning_rate": 2.67e-07, "loss": 9.4598, "step": 89 }, { "epoch": 0.5070422535211268, "grad_norm": 29.89952850341797, "learning_rate": 2.7e-07, "loss": 8.9964, "step": 90 }, { "epoch": 0.5126760563380282, "grad_norm": 26.13751983642578, "learning_rate": 2.73e-07, "loss": 8.2028, "step": 91 }, { "epoch": 0.5183098591549296, "grad_norm": 24.984628677368164, "learning_rate": 2.76e-07, "loss": 7.9064, "step": 92 }, { "epoch": 0.523943661971831, "grad_norm": 26.014310836791992, "learning_rate": 2.79e-07, "loss": 7.9926, "step": 93 }, { "epoch": 0.5295774647887324, 
"grad_norm": 24.743000030517578, "learning_rate": 2.82e-07, "loss": 7.7252, "step": 94 }, { "epoch": 0.5352112676056338, "grad_norm": 26.368148803710938, "learning_rate": 2.85e-07, "loss": 7.9539, "step": 95 }, { "epoch": 0.5408450704225352, "grad_norm": 27.547103881835938, "learning_rate": 2.88e-07, "loss": 8.0634, "step": 96 }, { "epoch": 0.5464788732394367, "grad_norm": 25.676986694335938, "learning_rate": 2.91e-07, "loss": 7.727, "step": 97 }, { "epoch": 0.5521126760563381, "grad_norm": 26.837203979492188, "learning_rate": 2.94e-07, "loss": 7.8317, "step": 98 }, { "epoch": 0.5577464788732395, "grad_norm": 26.280786514282227, "learning_rate": 2.97e-07, "loss": 7.6846, "step": 99 }, { "epoch": 0.5633802816901409, "grad_norm": 27.732624053955078, "learning_rate": 3.0000000000000004e-07, "loss": 7.79, "step": 100 }, { "epoch": 0.5690140845070423, "grad_norm": 27.480138778686523, "learning_rate": 3.03e-07, "loss": 7.6279, "step": 101 }, { "epoch": 0.5746478873239437, "grad_norm": 25.806196212768555, "learning_rate": 3.06e-07, "loss": 7.3647, "step": 102 }, { "epoch": 0.5802816901408451, "grad_norm": 27.241296768188477, "learning_rate": 3.0900000000000003e-07, "loss": 7.6385, "step": 103 }, { "epoch": 0.5859154929577465, "grad_norm": 27.93297004699707, "learning_rate": 3.12e-07, "loss": 7.6403, "step": 104 }, { "epoch": 0.5915492957746479, "grad_norm": 28.791919708251953, "learning_rate": 3.15e-07, "loss": 7.6192, "step": 105 }, { "epoch": 0.5971830985915493, "grad_norm": 28.215551376342773, "learning_rate": 3.18e-07, "loss": 7.4607, "step": 106 }, { "epoch": 0.6028169014084507, "grad_norm": 29.93889617919922, "learning_rate": 3.21e-07, "loss": 7.6467, "step": 107 }, { "epoch": 0.6084507042253521, "grad_norm": 27.232234954833984, "learning_rate": 3.2400000000000004e-07, "loss": 7.1521, "step": 108 }, { "epoch": 0.6140845070422535, "grad_norm": 28.14197540283203, "learning_rate": 3.27e-07, "loss": 7.2471, "step": 109 }, { "epoch": 0.6197183098591549, "grad_norm": 
27.78325653076172, "learning_rate": 3.2999999999999996e-07, "loss": 7.153, "step": 110 }, { "epoch": 0.6253521126760564, "grad_norm": 27.265422821044922, "learning_rate": 3.3300000000000003e-07, "loss": 7.0405, "step": 111 }, { "epoch": 0.6309859154929578, "grad_norm": 29.104612350463867, "learning_rate": 3.36e-07, "loss": 7.294, "step": 112 }, { "epoch": 0.6366197183098592, "grad_norm": 28.48741912841797, "learning_rate": 3.39e-07, "loss": 7.0529, "step": 113 }, { "epoch": 0.6422535211267606, "grad_norm": 29.633562088012695, "learning_rate": 3.42e-07, "loss": 7.0792, "step": 114 }, { "epoch": 0.647887323943662, "grad_norm": 29.809619903564453, "learning_rate": 3.45e-07, "loss": 7.1284, "step": 115 }, { "epoch": 0.6535211267605634, "grad_norm": 29.871963500976562, "learning_rate": 3.48e-07, "loss": 7.0132, "step": 116 }, { "epoch": 0.6591549295774648, "grad_norm": 29.897907257080078, "learning_rate": 3.51e-07, "loss": 6.9608, "step": 117 }, { "epoch": 0.6647887323943662, "grad_norm": 30.40986442565918, "learning_rate": 3.54e-07, "loss": 7.0252, "step": 118 }, { "epoch": 0.6704225352112676, "grad_norm": 27.947280883789062, "learning_rate": 3.5700000000000003e-07, "loss": 6.595, "step": 119 }, { "epoch": 0.676056338028169, "grad_norm": 27.550397872924805, "learning_rate": 3.6e-07, "loss": 6.506, "step": 120 }, { "epoch": 0.6816901408450704, "grad_norm": 29.49580192565918, "learning_rate": 3.63e-07, "loss": 6.6519, "step": 121 }, { "epoch": 0.6873239436619718, "grad_norm": 26.66489028930664, "learning_rate": 3.66e-07, "loss": 6.2367, "step": 122 }, { "epoch": 0.6929577464788732, "grad_norm": 27.495731353759766, "learning_rate": 3.6900000000000004e-07, "loss": 6.3315, "step": 123 }, { "epoch": 0.6985915492957746, "grad_norm": 28.700468063354492, "learning_rate": 3.72e-07, "loss": 6.445, "step": 124 }, { "epoch": 0.704225352112676, "grad_norm": 29.51987075805664, "learning_rate": 3.75e-07, "loss": 6.4283, "step": 125 }, { "epoch": 0.7098591549295775, "grad_norm": 
28.541399002075195, "learning_rate": 3.78e-07, "loss": 6.2563, "step": 126 }, { "epoch": 0.7154929577464789, "grad_norm": 30.63127899169922, "learning_rate": 3.81e-07, "loss": 6.4155, "step": 127 }, { "epoch": 0.7211267605633803, "grad_norm": 28.95149803161621, "learning_rate": 3.8400000000000005e-07, "loss": 6.2745, "step": 128 }, { "epoch": 0.7267605633802817, "grad_norm": 28.605030059814453, "learning_rate": 3.87e-07, "loss": 6.2393, "step": 129 }, { "epoch": 0.7323943661971831, "grad_norm": 27.956668853759766, "learning_rate": 3.8999999999999997e-07, "loss": 6.0958, "step": 130 }, { "epoch": 0.7380281690140845, "grad_norm": 28.11646270751953, "learning_rate": 3.9300000000000004e-07, "loss": 5.963, "step": 131 }, { "epoch": 0.7436619718309859, "grad_norm": 24.97135353088379, "learning_rate": 3.96e-07, "loss": 5.6173, "step": 132 }, { "epoch": 0.7492957746478873, "grad_norm": null, "learning_rate": 3.96e-07, "loss": 8.1969, "step": 133 }, { "epoch": 0.7549295774647887, "grad_norm": 42.452293395996094, "learning_rate": 3.99e-07, "loss": 7.3189, "step": 134 }, { "epoch": 0.7605633802816901, "grad_norm": 39.87089157104492, "learning_rate": 4.02e-07, "loss": 6.9278, "step": 135 }, { "epoch": 0.7661971830985915, "grad_norm": 36.937870025634766, "learning_rate": 4.05e-07, "loss": 6.6801, "step": 136 }, { "epoch": 0.7718309859154929, "grad_norm": 36.147464752197266, "learning_rate": 4.08e-07, "loss": 6.6071, "step": 137 }, { "epoch": 0.7774647887323943, "grad_norm": 39.3355598449707, "learning_rate": 4.11e-07, "loss": 6.7304, "step": 138 }, { "epoch": 0.7830985915492957, "grad_norm": 34.039981842041016, "learning_rate": 4.1400000000000003e-07, "loss": 6.2375, "step": 139 }, { "epoch": 0.7887323943661971, "grad_norm": 36.65914535522461, "learning_rate": 4.17e-07, "loss": 6.4087, "step": 140 }, { "epoch": 0.7943661971830986, "grad_norm": 37.91633605957031, "learning_rate": 4.2e-07, "loss": 6.4484, "step": 141 }, { "epoch": 0.8, "grad_norm": 39.105125427246094, 
"learning_rate": 4.23e-07, "loss": 6.4929, "step": 142 }, { "epoch": 0.8056338028169014, "grad_norm": 40.3162727355957, "learning_rate": 4.2600000000000003e-07, "loss": 6.5174, "step": 143 }, { "epoch": 0.8112676056338028, "grad_norm": 35.70503234863281, "learning_rate": 4.2900000000000004e-07, "loss": 6.1367, "step": 144 }, { "epoch": 0.8169014084507042, "grad_norm": 36.86891174316406, "learning_rate": 4.32e-07, "loss": 6.1179, "step": 145 }, { "epoch": 0.8225352112676056, "grad_norm": 39.80533218383789, "learning_rate": 4.35e-07, "loss": 6.2952, "step": 146 }, { "epoch": 0.828169014084507, "grad_norm": 36.694480895996094, "learning_rate": 4.3800000000000003e-07, "loss": 5.9916, "step": 147 }, { "epoch": 0.8338028169014085, "grad_norm": 38.44818115234375, "learning_rate": 4.41e-07, "loss": 6.0445, "step": 148 }, { "epoch": 0.8394366197183099, "grad_norm": 38.265995025634766, "learning_rate": 4.44e-07, "loss": 6.0292, "step": 149 }, { "epoch": 0.8450704225352113, "grad_norm": 36.704952239990234, "learning_rate": 4.47e-07, "loss": 5.8879, "step": 150 }, { "epoch": 0.8507042253521127, "grad_norm": 37.268917083740234, "learning_rate": 4.5e-07, "loss": 5.8587, "step": 151 }, { "epoch": 0.856338028169014, "grad_norm": 35.31634521484375, "learning_rate": 4.5300000000000005e-07, "loss": 5.7078, "step": 152 }, { "epoch": 0.8619718309859155, "grad_norm": 37.19781494140625, "learning_rate": 4.56e-07, "loss": 5.7555, "step": 153 }, { "epoch": 0.8676056338028169, "grad_norm": 34.77775955200195, "learning_rate": 4.5899999999999997e-07, "loss": 5.6057, "step": 154 }, { "epoch": 0.8732394366197183, "grad_norm": 33.57127380371094, "learning_rate": 4.6200000000000003e-07, "loss": 5.4405, "step": 155 }, { "epoch": 0.8788732394366198, "grad_norm": 36.10496139526367, "learning_rate": 4.65e-07, "loss": 5.4799, "step": 156 }, { "epoch": 0.8845070422535212, "grad_norm": 36.44635009765625, "learning_rate": 4.68e-07, "loss": 5.4978, "step": 157 }, { "epoch": 0.8901408450704226, 
"grad_norm": 34.81025314331055, "learning_rate": 4.7099999999999997e-07, "loss": 5.351, "step": 158 }, { "epoch": 0.895774647887324, "grad_norm": 34.08943557739258, "learning_rate": 4.7400000000000004e-07, "loss": 5.2605, "step": 159 }, { "epoch": 0.9014084507042254, "grad_norm": 34.23727798461914, "learning_rate": 4.77e-07, "loss": 5.2195, "step": 160 }, { "epoch": 0.9070422535211268, "grad_norm": 32.607879638671875, "learning_rate": 4.800000000000001e-07, "loss": 5.1298, "step": 161 }, { "epoch": 0.9126760563380282, "grad_norm": 33.94306564331055, "learning_rate": 4.83e-07, "loss": 5.1769, "step": 162 }, { "epoch": 0.9183098591549296, "grad_norm": 31.63994026184082, "learning_rate": 4.86e-07, "loss": 5.024, "step": 163 }, { "epoch": 0.923943661971831, "grad_norm": 33.26905059814453, "learning_rate": 4.89e-07, "loss": 4.9806, "step": 164 }, { "epoch": 0.9295774647887324, "grad_norm": 32.36957931518555, "learning_rate": 4.92e-07, "loss": 4.9891, "step": 165 }, { "epoch": 0.9352112676056338, "grad_norm": 28.738908767700195, "learning_rate": 4.95e-07, "loss": 4.7682, "step": 166 }, { "epoch": 0.9408450704225352, "grad_norm": 28.542316436767578, "learning_rate": 4.98e-07, "loss": 4.8045, "step": 167 }, { "epoch": 0.9464788732394366, "grad_norm": 25.792383193969727, "learning_rate": 5.01e-07, "loss": 4.6496, "step": 168 }, { "epoch": 0.952112676056338, "grad_norm": 27.16576385498047, "learning_rate": 5.04e-07, "loss": 4.6519, "step": 169 }, { "epoch": 0.9577464788732394, "grad_norm": 25.441028594970703, "learning_rate": 5.07e-07, "loss": 4.5866, "step": 170 }, { "epoch": 0.9633802816901409, "grad_norm": 25.167131423950195, "learning_rate": 5.100000000000001e-07, "loss": 4.6062, "step": 171 }, { "epoch": 0.9690140845070423, "grad_norm": 23.295936584472656, "learning_rate": 5.13e-07, "loss": 4.5423, "step": 172 }, { "epoch": 0.9746478873239437, "grad_norm": 21.83075523376465, "learning_rate": 5.16e-07, "loss": 4.4349, "step": 173 }, { "epoch": 0.9802816901408451, 
"grad_norm": 18.890296936035156, "learning_rate": 5.19e-07, "loss": 4.3618, "step": 174 }, { "epoch": 0.9859154929577465, "grad_norm": 17.86733627319336, "learning_rate": 5.219999999999999e-07, "loss": 4.3321, "step": 175 }, { "epoch": 0.9915492957746479, "grad_norm": 15.089102745056152, "learning_rate": 5.250000000000001e-07, "loss": 4.2655, "step": 176 }, { "epoch": 0.9971830985915493, "grad_norm": 20.50058937072754, "learning_rate": 5.280000000000001e-07, "loss": 4.4262, "step": 177 }, { "epoch": 1.0, "grad_norm": 7.529200553894043, "learning_rate": 5.31e-07, "loss": 2.1518, "step": 178 }, { "epoch": 1.0056338028169014, "grad_norm": 28.378694534301758, "learning_rate": 5.34e-07, "loss": 4.7345, "step": 179 }, { "epoch": 1.0112676056338028, "grad_norm": 24.153104782104492, "learning_rate": 5.37e-07, "loss": 4.617, "step": 180 }, { "epoch": 1.0169014084507042, "grad_norm": 16.407052993774414, "learning_rate": 5.4e-07, "loss": 4.3972, "step": 181 }, { "epoch": 1.0225352112676056, "grad_norm": 13.738978385925293, "learning_rate": 5.43e-07, "loss": 4.3588, "step": 182 }, { "epoch": 1.028169014084507, "grad_norm": 11.753392219543457, "learning_rate": 5.46e-07, "loss": 4.3117, "step": 183 }, { "epoch": 1.0338028169014084, "grad_norm": 10.56106185913086, "learning_rate": 5.490000000000001e-07, "loss": 4.2987, "step": 184 }, { "epoch": 1.0394366197183098, "grad_norm": 8.984735488891602, "learning_rate": 5.52e-07, "loss": 4.2585, "step": 185 }, { "epoch": 1.0450704225352112, "grad_norm": 7.201539516448975, "learning_rate": 5.55e-07, "loss": 4.2362, "step": 186 }, { "epoch": 1.0507042253521126, "grad_norm": 6.696246147155762, "learning_rate": 5.58e-07, "loss": 4.2512, "step": 187 }, { "epoch": 1.056338028169014, "grad_norm": 7.102692604064941, "learning_rate": 5.61e-07, "loss": 4.2358, "step": 188 }, { "epoch": 1.0619718309859154, "grad_norm": 7.0099616050720215, "learning_rate": 5.64e-07, "loss": 4.252, "step": 189 }, { "epoch": 1.0676056338028168, "grad_norm": 
6.835728645324707, "learning_rate": 5.67e-07, "loss": 4.2108, "step": 190 }, { "epoch": 1.0732394366197182, "grad_norm": 7.586347579956055, "learning_rate": 5.7e-07, "loss": 4.1935, "step": 191 }, { "epoch": 1.0788732394366196, "grad_norm": 7.363047122955322, "learning_rate": 5.73e-07, "loss": 4.1578, "step": 192 }, { "epoch": 1.084507042253521, "grad_norm": 7.040609836578369, "learning_rate": 5.76e-07, "loss": 4.136, "step": 193 }, { "epoch": 1.0901408450704226, "grad_norm": 6.436159610748291, "learning_rate": 5.790000000000001e-07, "loss": 4.1612, "step": 194 }, { "epoch": 1.095774647887324, "grad_norm": 7.356907367706299, "learning_rate": 5.82e-07, "loss": 4.096, "step": 195 }, { "epoch": 1.1014084507042254, "grad_norm": 6.898291110992432, "learning_rate": 5.85e-07, "loss": 4.0987, "step": 196 }, { "epoch": 1.1070422535211268, "grad_norm": 6.676201343536377, "learning_rate": 5.88e-07, "loss": 4.0404, "step": 197 }, { "epoch": 1.1126760563380282, "grad_norm": 7.3079352378845215, "learning_rate": 5.909999999999999e-07, "loss": 4.0107, "step": 198 }, { "epoch": 1.1183098591549296, "grad_norm": 6.069024562835693, "learning_rate": 5.94e-07, "loss": 4.0537, "step": 199 }, { "epoch": 1.123943661971831, "grad_norm": 6.009862899780273, "learning_rate": 5.970000000000001e-07, "loss": 4.0273, "step": 200 }, { "epoch": 1.1295774647887324, "grad_norm": 6.165462970733643, "learning_rate": 6.000000000000001e-07, "loss": 4.037, "step": 201 }, { "epoch": 1.1352112676056338, "grad_norm": 6.048194408416748, "learning_rate": 6.03e-07, "loss": 3.9988, "step": 202 }, { "epoch": 1.1408450704225352, "grad_norm": 6.066869258880615, "learning_rate": 6.06e-07, "loss": 3.9743, "step": 203 }, { "epoch": 1.1464788732394366, "grad_norm": 5.647124290466309, "learning_rate": 6.09e-07, "loss": 3.9283, "step": 204 }, { "epoch": 1.152112676056338, "grad_norm": 6.250226974487305, "learning_rate": 6.12e-07, "loss": 4.0008, "step": 205 }, { "epoch": 1.1577464788732394, "grad_norm": 5.445071697235107, 
"learning_rate": 6.15e-07, "loss": 3.9175, "step": 206 }, { "epoch": 1.1633802816901408, "grad_norm": 5.444154262542725, "learning_rate": 6.180000000000001e-07, "loss": 3.9345, "step": 207 }, { "epoch": 1.1690140845070423, "grad_norm": 5.232634544372559, "learning_rate": 6.21e-07, "loss": 3.9629, "step": 208 }, { "epoch": 1.1746478873239437, "grad_norm": 5.474876403808594, "learning_rate": 6.24e-07, "loss": 3.8974, "step": 209 }, { "epoch": 1.180281690140845, "grad_norm": 5.5575642585754395, "learning_rate": 6.27e-07, "loss": 3.8296, "step": 210 }, { "epoch": 1.1859154929577465, "grad_norm": 5.940123081207275, "learning_rate": 6.3e-07, "loss": 3.8652, "step": 211 }, { "epoch": 1.1915492957746479, "grad_norm": 5.474195957183838, "learning_rate": 6.33e-07, "loss": 3.8572, "step": 212 }, { "epoch": 1.1971830985915493, "grad_norm": 6.2269415855407715, "learning_rate": 6.36e-07, "loss": 3.7893, "step": 213 }, { "epoch": 1.2028169014084507, "grad_norm": 5.20849609375, "learning_rate": 6.39e-07, "loss": 3.8468, "step": 214 }, { "epoch": 1.208450704225352, "grad_norm": 6.209558963775635, "learning_rate": 6.42e-07, "loss": 3.7182, "step": 215 }, { "epoch": 1.2140845070422535, "grad_norm": 5.492266654968262, "learning_rate": 6.45e-07, "loss": 3.7881, "step": 216 }, { "epoch": 1.2197183098591549, "grad_norm": 4.731509208679199, "learning_rate": 6.480000000000001e-07, "loss": 3.7978, "step": 217 }, { "epoch": 1.2253521126760563, "grad_norm": 5.000571250915527, "learning_rate": 6.51e-07, "loss": 3.7661, "step": 218 }, { "epoch": 1.2309859154929577, "grad_norm": 5.254445552825928, "learning_rate": 6.54e-07, "loss": 3.7341, "step": 219 }, { "epoch": 1.236619718309859, "grad_norm": 4.641867637634277, "learning_rate": 6.57e-07, "loss": 3.767, "step": 220 }, { "epoch": 1.2422535211267607, "grad_norm": 5.31817626953125, "learning_rate": 6.599999999999999e-07, "loss": 3.7103, "step": 221 }, { "epoch": 1.247887323943662, "grad_norm": 5.551826477050781, "learning_rate": 6.63e-07, 
"loss": 3.6299, "step": 222 }, { "epoch": 1.2535211267605635, "grad_norm": 17.55986213684082, "learning_rate": 6.660000000000001e-07, "loss": 4.1343, "step": 223 }, { "epoch": 1.2591549295774649, "grad_norm": 12.343875885009766, "learning_rate": 6.690000000000001e-07, "loss": 4.0199, "step": 224 }, { "epoch": 1.2647887323943663, "grad_norm": 10.083864212036133, "learning_rate": 6.72e-07, "loss": 3.9426, "step": 225 }, { "epoch": 1.2704225352112677, "grad_norm": 6.6908345222473145, "learning_rate": 6.75e-07, "loss": 3.8179, "step": 226 }, { "epoch": 1.276056338028169, "grad_norm": 6.976649284362793, "learning_rate": 6.78e-07, "loss": 3.8272, "step": 227 }, { "epoch": 1.2816901408450705, "grad_norm": 5.452723503112793, "learning_rate": 6.81e-07, "loss": 3.7949, "step": 228 }, { "epoch": 1.287323943661972, "grad_norm": 4.782120704650879, "learning_rate": 6.84e-07, "loss": 3.739, "step": 229 }, { "epoch": 1.2929577464788733, "grad_norm": 5.477733612060547, "learning_rate": 6.87e-07, "loss": 3.7878, "step": 230 }, { "epoch": 1.2985915492957747, "grad_norm": 4.35331916809082, "learning_rate": 6.9e-07, "loss": 3.7312, "step": 231 }, { "epoch": 1.304225352112676, "grad_norm": 4.688896656036377, "learning_rate": 6.93e-07, "loss": 3.6926, "step": 232 }, { "epoch": 1.3098591549295775, "grad_norm": 4.093502998352051, "learning_rate": 6.96e-07, "loss": 3.7448, "step": 233 }, { "epoch": 1.315492957746479, "grad_norm": 4.601402282714844, "learning_rate": 6.990000000000001e-07, "loss": 3.733, "step": 234 }, { "epoch": 1.3211267605633803, "grad_norm": 5.0725555419921875, "learning_rate": 7.02e-07, "loss": 3.6794, "step": 235 }, { "epoch": 1.3267605633802817, "grad_norm": 4.2628374099731445, "learning_rate": 7.05e-07, "loss": 3.6747, "step": 236 }, { "epoch": 1.332394366197183, "grad_norm": 4.159196853637695, "learning_rate": 7.08e-07, "loss": 3.6834, "step": 237 }, { "epoch": 1.3380281690140845, "grad_norm": 3.971897602081299, "learning_rate": 7.11e-07, "loss": 3.6498, "step": 238 
}, { "epoch": 1.343661971830986, "grad_norm": 4.230648517608643, "learning_rate": 7.140000000000001e-07, "loss": 3.6179, "step": 239 }, { "epoch": 1.3492957746478873, "grad_norm": 4.850123405456543, "learning_rate": 7.170000000000001e-07, "loss": 3.5813, "step": 240 }, { "epoch": 1.3549295774647887, "grad_norm": 3.619288206100464, "learning_rate": 7.2e-07, "loss": 3.6227, "step": 241 }, { "epoch": 1.36056338028169, "grad_norm": 4.144800662994385, "learning_rate": 7.23e-07, "loss": 3.5781, "step": 242 }, { "epoch": 1.3661971830985915, "grad_norm": 3.449338912963867, "learning_rate": 7.26e-07, "loss": 3.5961, "step": 243 }, { "epoch": 1.371830985915493, "grad_norm": 3.306309938430786, "learning_rate": 7.29e-07, "loss": 3.542, "step": 244 }, { "epoch": 1.3774647887323943, "grad_norm": 3.7249999046325684, "learning_rate": 7.32e-07, "loss": 3.5631, "step": 245 }, { "epoch": 1.3830985915492957, "grad_norm": 3.403693914413452, "learning_rate": 7.350000000000001e-07, "loss": 3.4998, "step": 246 }, { "epoch": 1.388732394366197, "grad_norm": 3.535109519958496, "learning_rate": 7.380000000000001e-07, "loss": 3.5187, "step": 247 }, { "epoch": 1.3943661971830985, "grad_norm": 3.3754496574401855, "learning_rate": 7.41e-07, "loss": 3.5296, "step": 248 }, { "epoch": 1.4, "grad_norm": 3.4589614868164062, "learning_rate": 7.44e-07, "loss": 3.5365, "step": 249 }, { "epoch": 1.4056338028169013, "grad_norm": 3.0371317863464355, "learning_rate": 7.47e-07, "loss": 3.4723, "step": 250 }, { "epoch": 1.4112676056338027, "grad_norm": 3.40959095954895, "learning_rate": 7.5e-07, "loss": 3.47, "step": 251 }, { "epoch": 1.4169014084507041, "grad_norm": 3.2615842819213867, "learning_rate": 7.53e-07, "loss": 3.5185, "step": 252 }, { "epoch": 1.4225352112676055, "grad_norm": 2.8736321926116943, "learning_rate": 7.56e-07, "loss": 3.5047, "step": 253 }, { "epoch": 1.428169014084507, "grad_norm": 3.355738639831543, "learning_rate": 7.59e-07, "loss": 3.4177, "step": 254 }, { "epoch": 
1.4338028169014083, "grad_norm": 3.714266300201416, "learning_rate": 7.62e-07, "loss": 3.4446, "step": 255 }, { "epoch": 1.43943661971831, "grad_norm": 3.5116286277770996, "learning_rate": 7.65e-07, "loss": 3.4098, "step": 256 }, { "epoch": 1.4450704225352113, "grad_norm": 3.016411542892456, "learning_rate": 7.680000000000001e-07, "loss": 3.3968, "step": 257 }, { "epoch": 1.4507042253521127, "grad_norm": 3.350151538848877, "learning_rate": 7.71e-07, "loss": 3.3943, "step": 258 }, { "epoch": 1.4563380281690141, "grad_norm": 3.121488571166992, "learning_rate": 7.74e-07, "loss": 3.3664, "step": 259 }, { "epoch": 1.4619718309859155, "grad_norm": 2.776261329650879, "learning_rate": 7.77e-07, "loss": 3.3606, "step": 260 }, { "epoch": 1.467605633802817, "grad_norm": 3.6956863403320312, "learning_rate": 7.799999999999999e-07, "loss": 3.3889, "step": 261 }, { "epoch": 1.4732394366197183, "grad_norm": 5.630766868591309, "learning_rate": 7.830000000000001e-07, "loss": 3.3498, "step": 262 }, { "epoch": 1.4788732394366197, "grad_norm": 4.4029693603515625, "learning_rate": 7.860000000000001e-07, "loss": 3.3344, "step": 263 }, { "epoch": 1.4845070422535211, "grad_norm": 2.447305917739868, "learning_rate": 7.89e-07, "loss": 3.3012, "step": 264 }, { "epoch": 1.4901408450704225, "grad_norm": 10.303879737854004, "learning_rate": 7.92e-07, "loss": 3.3133, "step": 265 }, { "epoch": 1.495774647887324, "grad_norm": 3.3926241397857666, "learning_rate": 7.95e-07, "loss": 3.3414, "step": 266 }, { "epoch": 1.5014084507042254, "grad_norm": 15.229345321655273, "learning_rate": 7.98e-07, "loss": 3.6492, "step": 267 }, { "epoch": 1.5070422535211268, "grad_norm": 8.38891887664795, "learning_rate": 8.01e-07, "loss": 3.4816, "step": 268 }, { "epoch": 1.5126760563380282, "grad_norm": 7.57686185836792, "learning_rate": 8.04e-07, "loss": 3.4408, "step": 269 }, { "epoch": 1.5183098591549296, "grad_norm": 4.836228847503662, "learning_rate": 8.070000000000001e-07, "loss": 3.4019, "step": 270 }, { 
"epoch": 1.523943661971831, "grad_norm": 4.142337322235107, "learning_rate": 8.1e-07, "loss": 3.3743, "step": 271 }, { "epoch": 1.5295774647887324, "grad_norm": 3.1797330379486084, "learning_rate": 8.13e-07, "loss": 3.3709, "step": 272 }, { "epoch": 1.5352112676056338, "grad_norm": 3.2781105041503906, "learning_rate": 8.16e-07, "loss": 3.3235, "step": 273 }, { "epoch": 1.5408450704225352, "grad_norm": 2.989748239517212, "learning_rate": 8.19e-07, "loss": 3.3489, "step": 274 }, { "epoch": 1.5464788732394368, "grad_norm": 3.2133727073669434, "learning_rate": 8.22e-07, "loss": 3.3769, "step": 275 }, { "epoch": 1.5521126760563382, "grad_norm": 2.8232502937316895, "learning_rate": 8.25e-07, "loss": 3.3146, "step": 276 }, { "epoch": 1.5577464788732396, "grad_norm": 3.972604751586914, "learning_rate": 8.280000000000001e-07, "loss": 3.3137, "step": 277 }, { "epoch": 1.563380281690141, "grad_norm": 3.0173697471618652, "learning_rate": 8.31e-07, "loss": 3.2774, "step": 278 }, { "epoch": 1.5690140845070424, "grad_norm": 2.3883769512176514, "learning_rate": 8.34e-07, "loss": 3.3073, "step": 279 }, { "epoch": 1.5746478873239438, "grad_norm": 2.2080185413360596, "learning_rate": 8.370000000000001e-07, "loss": 3.2611, "step": 280 }, { "epoch": 1.5802816901408452, "grad_norm": 3.058180570602417, "learning_rate": 8.4e-07, "loss": 3.2561, "step": 281 }, { "epoch": 1.5859154929577466, "grad_norm": 3.721339225769043, "learning_rate": 8.43e-07, "loss": 3.2693, "step": 282 }, { "epoch": 1.591549295774648, "grad_norm": 4.630777359008789, "learning_rate": 8.46e-07, "loss": 3.2459, "step": 283 }, { "epoch": 1.5971830985915494, "grad_norm": 2.207216501235962, "learning_rate": 8.489999999999999e-07, "loss": 3.215, "step": 284 }, { "epoch": 1.6028169014084508, "grad_norm": 2.0840320587158203, "learning_rate": 8.520000000000001e-07, "loss": 3.2255, "step": 285 }, { "epoch": 1.6084507042253522, "grad_norm": 2.489400625228882, "learning_rate": 8.550000000000001e-07, "loss": 3.2098, "step": 286 
}, { "epoch": 1.6140845070422536, "grad_norm": 2.2128732204437256, "learning_rate": 8.580000000000001e-07, "loss": 3.236, "step": 287 }, { "epoch": 1.619718309859155, "grad_norm": 1.8475383520126343, "learning_rate": 8.61e-07, "loss": 3.2143, "step": 288 }, { "epoch": 1.6253521126760564, "grad_norm": 3.034592390060425, "learning_rate": 8.64e-07, "loss": 3.1599, "step": 289 }, { "epoch": 1.6309859154929578, "grad_norm": 3.48130464553833, "learning_rate": 8.67e-07, "loss": 3.1656, "step": 290 }, { "epoch": 1.6366197183098592, "grad_norm": 2.465686321258545, "learning_rate": 8.7e-07, "loss": 3.1281, "step": 291 }, { "epoch": 1.6422535211267606, "grad_norm": 2.5849077701568604, "learning_rate": 8.73e-07, "loss": 3.1726, "step": 292 }, { "epoch": 1.647887323943662, "grad_norm": 2.192965507507324, "learning_rate": 8.760000000000001e-07, "loss": 3.1238, "step": 293 }, { "epoch": 1.6535211267605634, "grad_norm": 1.818284273147583, "learning_rate": 8.79e-07, "loss": 3.1451, "step": 294 }, { "epoch": 1.6591549295774648, "grad_norm": 1.8378015756607056, "learning_rate": 8.82e-07, "loss": 3.1432, "step": 295 }, { "epoch": 1.6647887323943662, "grad_norm": 4.533341407775879, "learning_rate": 8.85e-07, "loss": 3.1177, "step": 296 }, { "epoch": 1.6704225352112676, "grad_norm": 3.2857859134674072, "learning_rate": 8.88e-07, "loss": 3.1139, "step": 297 }, { "epoch": 1.676056338028169, "grad_norm": 2.6295394897460938, "learning_rate": 8.91e-07, "loss": 3.1375, "step": 298 }, { "epoch": 1.6816901408450704, "grad_norm": 2.6304931640625, "learning_rate": 8.94e-07, "loss": 3.1008, "step": 299 }, { "epoch": 1.6873239436619718, "grad_norm": 2.0864717960357666, "learning_rate": 8.97e-07, "loss": 3.0836, "step": 300 }, { "epoch": 1.6929577464788732, "grad_norm": 1.9646295309066772, "learning_rate": 9e-07, "loss": 3.0995, "step": 301 }, { "epoch": 1.6985915492957746, "grad_norm": 2.4765114784240723, "learning_rate": 9.03e-07, "loss": 3.0557, "step": 302 }, { "epoch": 1.704225352112676, 
"grad_norm": 1.6692005395889282, "learning_rate": 9.060000000000001e-07, "loss": 3.0774, "step": 303 }, { "epoch": 1.7098591549295774, "grad_norm": 3.1556854248046875, "learning_rate": 9.09e-07, "loss": 3.103, "step": 304 }, { "epoch": 1.7154929577464788, "grad_norm": 4.288612365722656, "learning_rate": 9.12e-07, "loss": 3.0551, "step": 305 }, { "epoch": 1.7211267605633802, "grad_norm": 1.729299783706665, "learning_rate": 9.15e-07, "loss": 3.0499, "step": 306 }, { "epoch": 1.7267605633802816, "grad_norm": 3.1948986053466797, "learning_rate": 9.179999999999999e-07, "loss": 3.0367, "step": 307 }, { "epoch": 1.732394366197183, "grad_norm": 2.3002986907958984, "learning_rate": 9.210000000000001e-07, "loss": 3.0475, "step": 308 }, { "epoch": 1.7380281690140844, "grad_norm": 2.291055917739868, "learning_rate": 9.240000000000001e-07, "loss": 3.0386, "step": 309 }, { "epoch": 1.7436619718309858, "grad_norm": 3.8127360343933105, "learning_rate": 9.270000000000001e-07, "loss": 3.0193, "step": 310 }, { "epoch": 1.7492957746478872, "grad_norm": 8.73247241973877, "learning_rate": 9.3e-07, "loss": 3.3324, "step": 311 }, { "epoch": 1.7549295774647886, "grad_norm": 5.212857723236084, "learning_rate": 9.33e-07, "loss": 3.2241, "step": 312 }, { "epoch": 1.76056338028169, "grad_norm": 5.405500411987305, "learning_rate": 9.36e-07, "loss": 3.1716, "step": 313 }, { "epoch": 1.7661971830985914, "grad_norm": 3.3166182041168213, "learning_rate": 9.39e-07, "loss": 3.1203, "step": 314 }, { "epoch": 1.7718309859154928, "grad_norm": 4.144299507141113, "learning_rate": 9.419999999999999e-07, "loss": 3.1726, "step": 315 }, { "epoch": 1.7774647887323942, "grad_norm": 3.576324224472046, "learning_rate": 9.450000000000001e-07, "loss": 3.1518, "step": 316 }, { "epoch": 1.7830985915492956, "grad_norm": 3.0369715690612793, "learning_rate": 9.480000000000001e-07, "loss": 3.1077, "step": 317 }, { "epoch": 1.788732394366197, "grad_norm": 3.1641204357147217, "learning_rate": 9.51e-07, "loss": 3.1223, 
"step": 318 }, { "epoch": 1.7943661971830986, "grad_norm": 2.084167242050171, "learning_rate": 9.54e-07, "loss": 3.0577, "step": 319 }, { "epoch": 1.8, "grad_norm": 2.145345449447632, "learning_rate": 9.57e-07, "loss": 3.1057, "step": 320 }, { "epoch": 1.8056338028169014, "grad_norm": 3.1458144187927246, "learning_rate": 9.600000000000001e-07, "loss": 3.1221, "step": 321 }, { "epoch": 1.8112676056338028, "grad_norm": 2.8545215129852295, "learning_rate": 9.63e-07, "loss": 3.092, "step": 322 }, { "epoch": 1.8169014084507042, "grad_norm": 15.60283374786377, "learning_rate": 9.66e-07, "loss": 3.0654, "step": 323 }, { "epoch": 1.8225352112676056, "grad_norm": 2.4702093601226807, "learning_rate": 9.690000000000002e-07, "loss": 3.0606, "step": 324 }, { "epoch": 1.828169014084507, "grad_norm": 3.160369396209717, "learning_rate": 9.72e-07, "loss": 3.0641, "step": 325 }, { "epoch": 1.8338028169014085, "grad_norm": 2.9996860027313232, "learning_rate": 9.75e-07, "loss": 3.06, "step": 326 }, { "epoch": 1.8394366197183099, "grad_norm": 4.691850662231445, "learning_rate": 9.78e-07, "loss": 3.0031, "step": 327 }, { "epoch": 1.8450704225352113, "grad_norm": 2.0418202877044678, "learning_rate": 9.81e-07, "loss": 3.0525, "step": 328 }, { "epoch": 1.8507042253521127, "grad_norm": 3.0207626819610596, "learning_rate": 9.84e-07, "loss": 3.0289, "step": 329 }, { "epoch": 1.856338028169014, "grad_norm": 1.9885998964309692, "learning_rate": 9.87e-07, "loss": 3.0429, "step": 330 }, { "epoch": 1.8619718309859155, "grad_norm": 2.9951462745666504, "learning_rate": 9.9e-07, "loss": 3.0391, "step": 331 }, { "epoch": 1.8676056338028169, "grad_norm": 3.579716682434082, "learning_rate": 9.929999999999999e-07, "loss": 3.0725, "step": 332 }, { "epoch": 1.8732394366197183, "grad_norm": 3.4275898933410645, "learning_rate": 9.96e-07, "loss": 3.0552, "step": 333 }, { "epoch": 1.8788732394366199, "grad_norm": 3.915097951889038, "learning_rate": 9.99e-07, "loss": 3.0403, "step": 334 }, { "epoch": 
1.8845070422535213, "grad_norm": 3.5404648780822754, "learning_rate": 1.002e-06, "loss": 3.0184, "step": 335 }, { "epoch": 1.8901408450704227, "grad_norm": 1.993233561515808, "learning_rate": 1.0050000000000001e-06, "loss": 3.0271, "step": 336 }, { "epoch": 1.895774647887324, "grad_norm": 2.958256483078003, "learning_rate": 1.008e-06, "loss": 3.0009, "step": 337 }, { "epoch": 1.9014084507042255, "grad_norm": 2.1944117546081543, "learning_rate": 1.0110000000000001e-06, "loss": 2.973, "step": 338 }, { "epoch": 1.9070422535211269, "grad_norm": 4.562254905700684, "learning_rate": 1.014e-06, "loss": 3.0002, "step": 339 }, { "epoch": 1.9126760563380283, "grad_norm": 2.5491044521331787, "learning_rate": 1.017e-06, "loss": 3.0079, "step": 340 }, { "epoch": 1.9183098591549297, "grad_norm": 4.045207977294922, "learning_rate": 1.0200000000000002e-06, "loss": 2.9921, "step": 341 }, { "epoch": 1.923943661971831, "grad_norm": 4.1953864097595215, "learning_rate": 1.023e-06, "loss": 2.9992, "step": 342 }, { "epoch": 1.9295774647887325, "grad_norm": 2.195458173751831, "learning_rate": 1.026e-06, "loss": 3.0202, "step": 343 }, { "epoch": 1.935211267605634, "grad_norm": 6.150205135345459, "learning_rate": 1.029e-06, "loss": 2.9895, "step": 344 }, { "epoch": 1.9408450704225353, "grad_norm": 2.1709234714508057, "learning_rate": 1.032e-06, "loss": 3.026, "step": 345 }, { "epoch": 1.9464788732394367, "grad_norm": 3.657919406890869, "learning_rate": 1.035e-06, "loss": 2.9522, "step": 346 }, { "epoch": 1.952112676056338, "grad_norm": 5.459045886993408, "learning_rate": 1.038e-06, "loss": 2.9906, "step": 347 }, { "epoch": 1.9577464788732395, "grad_norm": 2.215078592300415, "learning_rate": 1.041e-06, "loss": 2.9721, "step": 348 }, { "epoch": 1.963380281690141, "grad_norm": 2.1503262519836426, "learning_rate": 1.0439999999999999e-06, "loss": 2.9818, "step": 349 }, { "epoch": 1.9690140845070423, "grad_norm": 21.535140991210938, "learning_rate": 1.047e-06, "loss": 2.9945, "step": 350 }, { 
"epoch": 1.9746478873239437, "grad_norm": 3.492640733718872, "learning_rate": 1.0500000000000001e-06, "loss": 2.9585, "step": 351 }, { "epoch": 1.980281690140845, "grad_norm": 10.216153144836426, "learning_rate": 1.053e-06, "loss": 3.0104, "step": 352 }, { "epoch": 1.9859154929577465, "grad_norm": 3.894277334213257, "learning_rate": 1.0560000000000001e-06, "loss": 2.9685, "step": 353 }, { "epoch": 1.991549295774648, "grad_norm": NaN, "learning_rate": 1.0560000000000001e-06, "loss": 3.016, "step": 354 }, { "epoch": 1.9971830985915493, "grad_norm": 2.764295816421509, "learning_rate": 1.059e-06, "loss": 3.0144, "step": 355 }, { "epoch": 2.0, "grad_norm": 2.5533089637756348, "learning_rate": 1.062e-06, "loss": 1.4893, "step": 356 }, { "epoch": 2.0056338028169014, "grad_norm": 6.689918518066406, "learning_rate": 1.065e-06, "loss": 3.156, "step": 357 }, { "epoch": 2.011267605633803, "grad_norm": 4.828878879547119, "learning_rate": 1.068e-06, "loss": 3.1063, "step": 358 }, { "epoch": 2.016901408450704, "grad_norm": 3.7969484329223633, "learning_rate": 1.0710000000000002e-06, "loss": 3.0965, "step": 359 }, { "epoch": 2.0225352112676056, "grad_norm": 3.629793405532837, "learning_rate": 1.074e-06, "loss": 3.0502, "step": 360 }, { "epoch": 2.028169014084507, "grad_norm": 4.136383533477783, "learning_rate": 1.077e-06, "loss": 3.0603, "step": 361 }, { "epoch": 2.0338028169014084, "grad_norm": 4.763708591461182, "learning_rate": 1.08e-06, "loss": 3.035, "step": 362 }, { "epoch": 2.03943661971831, "grad_norm": 2.662196397781372, "learning_rate": 1.083e-06, "loss": 2.979, "step": 363 }, { "epoch": 2.045070422535211, "grad_norm": 2.799008369445801, "learning_rate": 1.086e-06, "loss": 3.0423, "step": 364 }, { "epoch": 2.0507042253521126, "grad_norm": 3.0370540618896484, "learning_rate": 1.089e-06, "loss": 3.0191, "step": 365 }, { "epoch": 2.056338028169014, "grad_norm": 3.5550408363342285, "learning_rate": 1.092e-06, "loss": 2.9998, "step": 366 }, { "epoch": 2.0619718309859154, 
"grad_norm": 2.8191301822662354, "learning_rate": 1.0949999999999999e-06, "loss": 3.0373, "step": 367 }, { "epoch": 2.067605633802817, "grad_norm": 4.018879413604736, "learning_rate": 1.0980000000000001e-06, "loss": 2.9933, "step": 368 }, { "epoch": 2.073239436619718, "grad_norm": 3.813567876815796, "learning_rate": 1.1010000000000001e-06, "loss": 3.0079, "step": 369 }, { "epoch": 2.0788732394366196, "grad_norm": 2.6536049842834473, "learning_rate": 1.104e-06, "loss": 2.9907, "step": 370 }, { "epoch": 2.084507042253521, "grad_norm": 1.7705937623977661, "learning_rate": 1.1070000000000002e-06, "loss": 2.9996, "step": 371 }, { "epoch": 2.0901408450704224, "grad_norm": 5.244455814361572, "learning_rate": 1.11e-06, "loss": 3.0193, "step": 372 }, { "epoch": 2.095774647887324, "grad_norm": 4.140601634979248, "learning_rate": 1.113e-06, "loss": 2.9686, "step": 373 }, { "epoch": 2.101408450704225, "grad_norm": 2.9329001903533936, "learning_rate": 1.116e-06, "loss": 2.9843, "step": 374 }, { "epoch": 2.1070422535211266, "grad_norm": 1.8754574060440063, "learning_rate": 1.119e-06, "loss": 2.9626, "step": 375 }, { "epoch": 2.112676056338028, "grad_norm": 4.379260540008545, "learning_rate": 1.122e-06, "loss": 2.9652, "step": 376 }, { "epoch": 2.1183098591549294, "grad_norm": 3.4436750411987305, "learning_rate": 1.125e-06, "loss": 2.9727, "step": 377 }, { "epoch": 2.123943661971831, "grad_norm": 3.883599281311035, "learning_rate": 1.128e-06, "loss": 2.9619, "step": 378 }, { "epoch": 2.129577464788732, "grad_norm": 3.0950586795806885, "learning_rate": 1.131e-06, "loss": 2.9473, "step": 379 }, { "epoch": 2.1352112676056336, "grad_norm": 1.735630989074707, "learning_rate": 1.134e-06, "loss": 2.9603, "step": 380 }, { "epoch": 2.140845070422535, "grad_norm": 5.800061225891113, "learning_rate": 1.137e-06, "loss": 3.0003, "step": 381 }, { "epoch": 2.1464788732394364, "grad_norm": 3.413560152053833, "learning_rate": 1.14e-06, "loss": 2.9284, "step": 382 }, { "epoch": 2.152112676056338, 
"grad_norm": 2.977813482284546, "learning_rate": 1.1430000000000001e-06, "loss": 2.9381, "step": 383 }, { "epoch": 2.1577464788732392, "grad_norm": 2.820646047592163, "learning_rate": 1.146e-06, "loss": 2.9239, "step": 384 }, { "epoch": 2.1633802816901406, "grad_norm": 3.4326274394989014, "learning_rate": 1.1490000000000001e-06, "loss": 2.9261, "step": 385 }, { "epoch": 2.169014084507042, "grad_norm": 6.641375541687012, "learning_rate": 1.152e-06, "loss": 2.9679, "step": 386 }, { "epoch": 2.1746478873239434, "grad_norm": 5.253774166107178, "learning_rate": 1.155e-06, "loss": 2.9745, "step": 387 }, { "epoch": 2.1802816901408453, "grad_norm": 3.8959267139434814, "learning_rate": 1.1580000000000002e-06, "loss": 2.9117, "step": 388 }, { "epoch": 2.1859154929577467, "grad_norm": 1.9529342651367188, "learning_rate": 1.161e-06, "loss": 2.9216, "step": 389 }, { "epoch": 2.191549295774648, "grad_norm": 6.595493793487549, "learning_rate": 1.164e-06, "loss": 2.9176, "step": 390 }, { "epoch": 2.1971830985915495, "grad_norm": 3.104062557220459, "learning_rate": 1.167e-06, "loss": 2.9234, "step": 391 }, { "epoch": 2.202816901408451, "grad_norm": 2.850505828857422, "learning_rate": 1.17e-06, "loss": 2.9371, "step": 392 }, { "epoch": 2.2084507042253523, "grad_norm": 3.024420738220215, "learning_rate": 1.173e-06, "loss": 2.9335, "step": 393 }, { "epoch": 2.2140845070422537, "grad_norm": 3.7631072998046875, "learning_rate": 1.176e-06, "loss": 2.9129, "step": 394 }, { "epoch": 2.219718309859155, "grad_norm": 3.0405235290527344, "learning_rate": 1.179e-06, "loss": 2.9242, "step": 395 }, { "epoch": 2.2253521126760565, "grad_norm": 4.891364097595215, "learning_rate": 1.1819999999999999e-06, "loss": 2.9131, "step": 396 }, { "epoch": 2.230985915492958, "grad_norm": 2.57051420211792, "learning_rate": 1.185e-06, "loss": 2.8991, "step": 397 }, { "epoch": 2.2366197183098593, "grad_norm": 3.683042049407959, "learning_rate": 1.188e-06, "loss": 2.9211, "step": 398 }, { "epoch": 
2.2422535211267607, "grad_norm": 2.8408870697021484, "learning_rate": 1.191e-06, "loss": 2.8893, "step": 399 }, { "epoch": 2.247887323943662, "grad_norm": 6.567075252532959, "learning_rate": 1.1940000000000001e-06, "loss": 2.9126, "step": 400 }, { "epoch": 2.2535211267605635, "grad_norm": 4.963144779205322, "learning_rate": 1.197e-06, "loss": 3.0861, "step": 401 }, { "epoch": 2.259154929577465, "grad_norm": 5.517329692840576, "learning_rate": 1.2000000000000002e-06, "loss": 3.0618, "step": 402 }, { "epoch": 2.2647887323943663, "grad_norm": 3.461358070373535, "learning_rate": 1.203e-06, "loss": 2.9884, "step": 403 }, { "epoch": 2.2704225352112677, "grad_norm": 5.536230087280273, "learning_rate": 1.206e-06, "loss": 2.9819, "step": 404 }, { "epoch": 2.276056338028169, "grad_norm": 3.172144651412964, "learning_rate": 1.2090000000000002e-06, "loss": 2.965, "step": 405 }, { "epoch": 2.2816901408450705, "grad_norm": 4.672169208526611, "learning_rate": 1.212e-06, "loss": 2.9963, "step": 406 }, { "epoch": 2.287323943661972, "grad_norm": 2.5767641067504883, "learning_rate": 1.215e-06, "loss": 2.9119, "step": 407 }, { "epoch": 2.2929577464788733, "grad_norm": 3.546799659729004, "learning_rate": 1.218e-06, "loss": 2.9451, "step": 408 }, { "epoch": 2.2985915492957747, "grad_norm": 2.6019978523254395, "learning_rate": 1.221e-06, "loss": 2.9306, "step": 409 }, { "epoch": 2.304225352112676, "grad_norm": 5.824201583862305, "learning_rate": 1.224e-06, "loss": 2.9445, "step": 410 }, { "epoch": 2.3098591549295775, "grad_norm": 3.3293278217315674, "learning_rate": 1.227e-06, "loss": 2.9351, "step": 411 }, { "epoch": 2.315492957746479, "grad_norm": 5.109325885772705, "learning_rate": 1.23e-06, "loss": 2.9134, "step": 412 }, { "epoch": 2.3211267605633803, "grad_norm": 3.5838801860809326, "learning_rate": 1.2329999999999999e-06, "loss": 2.9145, "step": 413 }, { "epoch": 2.3267605633802817, "grad_norm": 4.243381977081299, "learning_rate": 1.2360000000000001e-06, "loss": 2.905, "step": 414 
}, { "epoch": 2.332394366197183, "grad_norm": 6.325119972229004, "learning_rate": 1.2390000000000001e-06, "loss": 2.9102, "step": 415 }, { "epoch": 2.3380281690140845, "grad_norm": 3.0729176998138428, "learning_rate": 1.242e-06, "loss": 2.8892, "step": 416 }, { "epoch": 2.343661971830986, "grad_norm": 5.03809118270874, "learning_rate": 1.2450000000000002e-06, "loss": 2.8839, "step": 417 }, { "epoch": 2.3492957746478873, "grad_norm": 2.4732346534729004, "learning_rate": 1.248e-06, "loss": 2.9064, "step": 418 }, { "epoch": 2.3549295774647887, "grad_norm": 3.957566499710083, "learning_rate": 1.251e-06, "loss": 2.8872, "step": 419 }, { "epoch": 2.36056338028169, "grad_norm": 3.1195902824401855, "learning_rate": 1.254e-06, "loss": 2.8886, "step": 420 }, { "epoch": 2.3661971830985915, "grad_norm": 4.334029674530029, "learning_rate": 1.257e-06, "loss": 2.873, "step": 421 }, { "epoch": 2.371830985915493, "grad_norm": 2.1096246242523193, "learning_rate": 1.26e-06, "loss": 2.8649, "step": 422 }, { "epoch": 2.3774647887323943, "grad_norm": 4.480950355529785, "learning_rate": 1.263e-06, "loss": 2.8852, "step": 423 }, { "epoch": 2.3830985915492957, "grad_norm": 4.877039909362793, "learning_rate": 1.266e-06, "loss": 2.8639, "step": 424 }, { "epoch": 2.388732394366197, "grad_norm": 3.921311140060425, "learning_rate": 1.269e-06, "loss": 2.8589, "step": 425 }, { "epoch": 2.3943661971830985, "grad_norm": 2.2594597339630127, "learning_rate": 1.272e-06, "loss": 2.8935, "step": 426 }, { "epoch": 2.4, "grad_norm": 4.150793075561523, "learning_rate": 1.275e-06, "loss": 2.8623, "step": 427 }, { "epoch": 2.4056338028169013, "grad_norm": 2.5821540355682373, "learning_rate": 1.278e-06, "loss": 2.8597, "step": 428 }, { "epoch": 2.4112676056338027, "grad_norm": 3.32155704498291, "learning_rate": 1.281e-06, "loss": 2.8644, "step": 429 }, { "epoch": 2.416901408450704, "grad_norm": 2.0583064556121826, "learning_rate": 1.284e-06, "loss": 2.8809, "step": 430 }, { "epoch": 2.4225352112676055, 
"grad_norm": 2.4684250354766846, "learning_rate": 1.2870000000000001e-06, "loss": 2.8583, "step": 431 }, { "epoch": 2.428169014084507, "grad_norm": 2.8934361934661865, "learning_rate": 1.29e-06, "loss": 2.8812, "step": 432 }, { "epoch": 2.4338028169014083, "grad_norm": 3.3147571086883545, "learning_rate": 1.293e-06, "loss": 2.8513, "step": 433 }, { "epoch": 2.4394366197183097, "grad_norm": 3.118459701538086, "learning_rate": 1.2960000000000002e-06, "loss": 2.8737, "step": 434 }, { "epoch": 2.445070422535211, "grad_norm": 5.725314140319824, "learning_rate": 1.299e-06, "loss": 2.8238, "step": 435 }, { "epoch": 2.4507042253521125, "grad_norm": 2.1813156604766846, "learning_rate": 1.302e-06, "loss": 2.827, "step": 436 }, { "epoch": 2.456338028169014, "grad_norm": 2.7385337352752686, "learning_rate": 1.305e-06, "loss": 2.8563, "step": 437 }, { "epoch": 2.4619718309859153, "grad_norm": 4.738112926483154, "learning_rate": 1.308e-06, "loss": 2.822, "step": 438 }, { "epoch": 2.4676056338028167, "grad_norm": 3.1284008026123047, "learning_rate": 1.311e-06, "loss": 2.8628, "step": 439 }, { "epoch": 2.473239436619718, "grad_norm": 3.920558452606201, "learning_rate": 1.314e-06, "loss": 2.8456, "step": 440 }, { "epoch": 2.4788732394366195, "grad_norm": 3.4516639709472656, "learning_rate": 1.317e-06, "loss": 2.8238, "step": 441 }, { "epoch": 2.4845070422535214, "grad_norm": 2.7140746116638184, "learning_rate": 1.3199999999999999e-06, "loss": 2.8164, "step": 442 }, { "epoch": 2.4901408450704228, "grad_norm": 3.0408647060394287, "learning_rate": 1.323e-06, "loss": 2.8268, "step": 443 }, { "epoch": 2.495774647887324, "grad_norm": 5.611374378204346, "learning_rate": 1.326e-06, "loss": 2.8166, "step": 444 }, { "epoch": 2.5014084507042256, "grad_norm": 4.1387619972229, "learning_rate": 1.3290000000000001e-06, "loss": 3.0038, "step": 445 }, { "epoch": 2.507042253521127, "grad_norm": 3.045279026031494, "learning_rate": 1.3320000000000001e-06, "loss": 2.9354, "step": 446 }, { "epoch": 
2.5126760563380284, "grad_norm": 2.492701530456543, "learning_rate": 1.335e-06, "loss": 2.9068, "step": 447 }, { "epoch": 2.5183098591549298, "grad_norm": 3.7552857398986816, "learning_rate": 1.3380000000000001e-06, "loss": 2.8962, "step": 448 }, { "epoch": 2.523943661971831, "grad_norm": 3.1780288219451904, "learning_rate": 1.341e-06, "loss": 2.869, "step": 449 }, { "epoch": 2.5295774647887326, "grad_norm": 3.949904680252075, "learning_rate": 1.344e-06, "loss": 2.8825, "step": 450 }, { "epoch": 2.535211267605634, "grad_norm": 1.9001998901367188, "learning_rate": 1.3470000000000002e-06, "loss": 2.8544, "step": 451 }, { "epoch": 2.5408450704225354, "grad_norm": 3.131744384765625, "learning_rate": 1.35e-06, "loss": 2.8851, "step": 452 }, { "epoch": 2.546478873239437, "grad_norm": 3.6800670623779297, "learning_rate": 1.353e-06, "loss": 2.8998, "step": 453 }, { "epoch": 2.552112676056338, "grad_norm": 3.9882307052612305, "learning_rate": 1.356e-06, "loss": 2.8831, "step": 454 }, { "epoch": 2.5577464788732396, "grad_norm": 2.457385778427124, "learning_rate": 1.359e-06, "loss": 2.8616, "step": 455 }, { "epoch": 2.563380281690141, "grad_norm": 2.2192134857177734, "learning_rate": 1.362e-06, "loss": 2.8799, "step": 456 }, { "epoch": 2.5690140845070424, "grad_norm": 3.6589062213897705, "learning_rate": 1.365e-06, "loss": 2.8637, "step": 457 }, { "epoch": 2.574647887323944, "grad_norm": 2.2132861614227295, "learning_rate": 1.368e-06, "loss": 2.8135, "step": 458 }, { "epoch": 2.580281690140845, "grad_norm": 2.988835334777832, "learning_rate": 1.3709999999999999e-06, "loss": 2.8356, "step": 459 }, { "epoch": 2.5859154929577466, "grad_norm": 2.2689504623413086, "learning_rate": 1.374e-06, "loss": 2.8513, "step": 460 }, { "epoch": 2.591549295774648, "grad_norm": 2.1941077709198, "learning_rate": 1.3770000000000001e-06, "loss": 2.8369, "step": 461 }, { "epoch": 2.5971830985915494, "grad_norm": 1.760879635810852, "learning_rate": 1.38e-06, "loss": 2.8273, "step": 462 }, { "epoch": 
2.602816901408451, "grad_norm": 4.897495269775391, "learning_rate": 1.3830000000000001e-06, "loss": 2.8198, "step": 463 }, { "epoch": 2.608450704225352, "grad_norm": 4.290358543395996, "learning_rate": 1.386e-06, "loss": 2.8371, "step": 464 }, { "epoch": 2.6140845070422536, "grad_norm": 3.481776714324951, "learning_rate": 1.389e-06, "loss": 2.8027, "step": 465 }, { "epoch": 2.619718309859155, "grad_norm": 2.979186773300171, "learning_rate": 1.392e-06, "loss": 2.8286, "step": 466 }, { "epoch": 2.6253521126760564, "grad_norm": 4.169500350952148, "learning_rate": 1.395e-06, "loss": 2.8278, "step": 467 }, { "epoch": 2.630985915492958, "grad_norm": 5.333034515380859, "learning_rate": 1.3980000000000002e-06, "loss": 2.8301, "step": 468 }, { "epoch": 2.636619718309859, "grad_norm": 6.221430778503418, "learning_rate": 1.401e-06, "loss": 2.8066, "step": 469 }, { "epoch": 2.6422535211267606, "grad_norm": 4.619940757751465, "learning_rate": 1.404e-06, "loss": 2.7916, "step": 470 }, { "epoch": 2.647887323943662, "grad_norm": 5.482257843017578, "learning_rate": 1.407e-06, "loss": 2.806, "step": 471 }, { "epoch": 2.6535211267605634, "grad_norm": 4.32538366317749, "learning_rate": 1.41e-06, "loss": 2.799, "step": 472 }, { "epoch": 2.659154929577465, "grad_norm": 5.186101913452148, "learning_rate": 1.413e-06, "loss": 2.7999, "step": 473 }, { "epoch": 2.664788732394366, "grad_norm": 6.246681213378906, "learning_rate": 1.416e-06, "loss": 2.7924, "step": 474 }, { "epoch": 2.6704225352112676, "grad_norm": 5.235856056213379, "learning_rate": 1.419e-06, "loss": 2.7918, "step": 475 }, { "epoch": 2.676056338028169, "grad_norm": 2.9274845123291016, "learning_rate": 1.422e-06, "loss": 2.8112, "step": 476 }, { "epoch": 2.6816901408450704, "grad_norm": 3.573071241378784, "learning_rate": 1.4250000000000001e-06, "loss": 2.7849, "step": 477 }, { "epoch": 2.687323943661972, "grad_norm": 4.214164733886719, "learning_rate": 1.4280000000000001e-06, "loss": 2.7879, "step": 478 }, { "epoch": 
2.692957746478873, "grad_norm": 5.562944412231445, "learning_rate": 1.431e-06, "loss": 2.7879, "step": 479 }, { "epoch": 2.6985915492957746, "grad_norm": 5.23793888092041, "learning_rate": 1.4340000000000002e-06, "loss": 2.7858, "step": 480 }, { "epoch": 2.704225352112676, "grad_norm": 4.238168716430664, "learning_rate": 1.437e-06, "loss": 2.7769, "step": 481 }, { "epoch": 2.7098591549295774, "grad_norm": 4.448953628540039, "learning_rate": 1.44e-06, "loss": 2.7617, "step": 482 }, { "epoch": 2.715492957746479, "grad_norm": 7.470413684844971, "learning_rate": 1.443e-06, "loss": 2.7972, "step": 483 }, { "epoch": 2.72112676056338, "grad_norm": 3.1278200149536133, "learning_rate": 1.446e-06, "loss": 2.7428, "step": 484 }, { "epoch": 2.7267605633802816, "grad_norm": 3.5966947078704834, "learning_rate": 1.449e-06, "loss": 2.774, "step": 485 }, { "epoch": 2.732394366197183, "grad_norm": 3.109821319580078, "learning_rate": 1.452e-06, "loss": 2.7867, "step": 486 }, { "epoch": 2.7380281690140844, "grad_norm": 4.992013931274414, "learning_rate": 1.455e-06, "loss": 2.7909, "step": 487 }, { "epoch": 2.743661971830986, "grad_norm": 5.205508232116699, "learning_rate": 1.458e-06, "loss": 2.742, "step": 488 }, { "epoch": 2.749295774647887, "grad_norm": 3.8282198905944824, "learning_rate": 1.461e-06, "loss": 2.9147, "step": 489 }, { "epoch": 2.7549295774647886, "grad_norm": 3.4786813259124756, "learning_rate": 1.464e-06, "loss": 2.9016, "step": 490 }, { "epoch": 2.76056338028169, "grad_norm": 4.716727256774902, "learning_rate": 1.467e-06, "loss": 2.8472, "step": 491 }, { "epoch": 2.7661971830985914, "grad_norm": 2.4909396171569824, "learning_rate": 1.4700000000000001e-06, "loss": 2.8292, "step": 492 }, { "epoch": 2.771830985915493, "grad_norm": 2.098987102508545, "learning_rate": 1.473e-06, "loss": 2.8354, "step": 493 }, { "epoch": 2.777464788732394, "grad_norm": 3.8950672149658203, "learning_rate": 1.4760000000000001e-06, "loss": 2.8584, "step": 494 }, { "epoch": 
2.7830985915492956, "grad_norm": 3.389186382293701, "learning_rate": 1.479e-06, "loss": 2.8431, "step": 495 }, { "epoch": 2.788732394366197, "grad_norm": 2.1209559440612793, "learning_rate": 1.482e-06, "loss": 2.8189, "step": 496 }, { "epoch": 2.7943661971830984, "grad_norm": 2.8957386016845703, "learning_rate": 1.4850000000000002e-06, "loss": 2.8003, "step": 497 }, { "epoch": 2.8, "grad_norm": 1.9849811792373657, "learning_rate": 1.488e-06, "loss": 2.7981, "step": 498 }, { "epoch": 2.8056338028169012, "grad_norm": 2.376265287399292, "learning_rate": 1.491e-06, "loss": 2.8231, "step": 499 }, { "epoch": 2.8112676056338026, "grad_norm": 3.4236159324645996, "learning_rate": 1.494e-06, "loss": 2.8096, "step": 500 }, { "epoch": 2.816901408450704, "grad_norm": 2.7298974990844727, "learning_rate": 1.497e-06, "loss": 2.8237, "step": 501 }, { "epoch": 2.8225352112676054, "grad_norm": 2.521082639694214, "learning_rate": 1.5e-06, "loss": 2.765, "step": 502 }, { "epoch": 2.828169014084507, "grad_norm": 2.644693613052368, "learning_rate": 1.503e-06, "loss": 2.8049, "step": 503 }, { "epoch": 2.8338028169014082, "grad_norm": 3.143251419067383, "learning_rate": 1.506e-06, "loss": 2.7773, "step": 504 }, { "epoch": 2.8394366197183096, "grad_norm": 2.4421496391296387, "learning_rate": 1.5089999999999999e-06, "loss": 2.7613, "step": 505 }, { "epoch": 2.845070422535211, "grad_norm": 3.1848864555358887, "learning_rate": 1.512e-06, "loss": 2.7795, "step": 506 }, { "epoch": 2.8507042253521124, "grad_norm": 2.911860704421997, "learning_rate": 1.5150000000000001e-06, "loss": 2.7608, "step": 507 }, { "epoch": 2.856338028169014, "grad_norm": 1.8944320678710938, "learning_rate": 1.518e-06, "loss": 2.7835, "step": 508 }, { "epoch": 2.8619718309859152, "grad_norm": 2.056074380874634, "learning_rate": 1.5210000000000001e-06, "loss": 2.7737, "step": 509 }, { "epoch": 2.8676056338028166, "grad_norm": 3.7578797340393066, "learning_rate": 1.524e-06, "loss": 2.7783, "step": 510 }, { "epoch": 
2.873239436619718, "grad_norm": 2.587336778640747, "learning_rate": 1.5270000000000002e-06, "loss": 2.7797, "step": 511 }, { "epoch": 2.87887323943662, "grad_norm": 5.611008644104004, "learning_rate": 1.53e-06, "loss": 2.744, "step": 512 }, { "epoch": 2.8845070422535213, "grad_norm": 10.416324615478516, "learning_rate": 1.533e-06, "loss": 2.7366, "step": 513 }, { "epoch": 2.8901408450704227, "grad_norm": 4.064530849456787, "learning_rate": 1.5360000000000002e-06, "loss": 2.7416, "step": 514 }, { "epoch": 2.895774647887324, "grad_norm": 6.801990032196045, "learning_rate": 1.539e-06, "loss": 2.7769, "step": 515 }, { "epoch": 2.9014084507042255, "grad_norm": 2.629598379135132, "learning_rate": 1.542e-06, "loss": 2.744, "step": 516 }, { "epoch": 2.907042253521127, "grad_norm": 4.535959720611572, "learning_rate": 1.545e-06, "loss": 2.7681, "step": 517 }, { "epoch": 2.9126760563380283, "grad_norm": 5.370257377624512, "learning_rate": 1.548e-06, "loss": 2.7789, "step": 518 }, { "epoch": 2.9183098591549297, "grad_norm": 5.6404852867126465, "learning_rate": 1.551e-06, "loss": 2.7694, "step": 519 }, { "epoch": 2.923943661971831, "grad_norm": 3.5447475910186768, "learning_rate": 1.554e-06, "loss": 2.7573, "step": 520 }, { "epoch": 2.9295774647887325, "grad_norm": 3.844684362411499, "learning_rate": 1.557e-06, "loss": 2.7499, "step": 521 }, { "epoch": 2.935211267605634, "grad_norm": 39.249507904052734, "learning_rate": 1.5599999999999999e-06, "loss": 2.7408, "step": 522 }, { "epoch": 2.9408450704225353, "grad_norm": 2.2351744174957275, "learning_rate": 1.5630000000000001e-06, "loss": 2.7732, "step": 523 }, { "epoch": 2.9464788732394367, "grad_norm": 4.3039870262146, "learning_rate": 1.5660000000000001e-06, "loss": 2.7418, "step": 524 }, { "epoch": 2.952112676056338, "grad_norm": 8.005955696105957, "learning_rate": 1.569e-06, "loss": 2.699, "step": 525 }, { "epoch": 2.9577464788732395, "grad_norm": 4.523921489715576, "learning_rate": 1.5720000000000002e-06, "loss": 2.7148, 
"step": 526 }, { "epoch": 2.963380281690141, "grad_norm": 8.109228134155273, "learning_rate": 1.575e-06, "loss": 2.7359, "step": 527 }, { "epoch": 2.9690140845070423, "grad_norm": 54.502098083496094, "learning_rate": 1.578e-06, "loss": 2.753, "step": 528 }, { "epoch": 2.9746478873239437, "grad_norm": 4.717286586761475, "learning_rate": 1.581e-06, "loss": 2.7562, "step": 529 }, { "epoch": 2.980281690140845, "grad_norm": 17.204912185668945, "learning_rate": 1.584e-06, "loss": 2.8052, "step": 530 }, { "epoch": 2.9859154929577465, "grad_norm": 6.698378562927246, "learning_rate": 1.5870000000000002e-06, "loss": 2.7627, "step": 531 }, { "epoch": 2.991549295774648, "grad_norm": 9.332170486450195, "learning_rate": 1.59e-06, "loss": 2.7683, "step": 532 }, { "epoch": 2.9971830985915493, "grad_norm": 2.9608309268951416, "learning_rate": 1.593e-06, "loss": 2.8084, "step": 533 }, { "epoch": 3.0, "grad_norm": 3.3101515769958496, "learning_rate": 1.596e-06, "loss": 1.3821, "step": 534 }, { "epoch": 3.0056338028169014, "grad_norm": 4.513636112213135, "learning_rate": 1.599e-06, "loss": 2.8764, "step": 535 }, { "epoch": 3.011267605633803, "grad_norm": 6.628481864929199, "learning_rate": 1.602e-06, "loss": 2.8876, "step": 536 }, { "epoch": 3.016901408450704, "grad_norm": 2.4313488006591797, "learning_rate": 1.605e-06, "loss": 2.8239, "step": 537 }, { "epoch": 3.0225352112676056, "grad_norm": 3.9404282569885254, "learning_rate": 1.608e-06, "loss": 2.8526, "step": 538 }, { "epoch": 3.028169014084507, "grad_norm": 2.381598949432373, "learning_rate": 1.611e-06, "loss": 2.8307, "step": 539 }, { "epoch": 3.0338028169014084, "grad_norm": 3.1737258434295654, "learning_rate": 1.6140000000000001e-06, "loss": 2.8175, "step": 540 }, { "epoch": 3.03943661971831, "grad_norm": 2.4474072456359863, "learning_rate": 1.6170000000000001e-06, "loss": 2.792, "step": 541 }, { "epoch": 3.045070422535211, "grad_norm": 4.117803573608398, "learning_rate": 1.62e-06, "loss": 2.7852, "step": 542 }, { "epoch": 
3.0507042253521126, "grad_norm": 2.7527599334716797, "learning_rate": 1.6230000000000002e-06, "loss": 2.7904, "step": 543 }, { "epoch": 3.056338028169014, "grad_norm": 1.7446880340576172, "learning_rate": 1.626e-06, "loss": 2.7979, "step": 544 }, { "epoch": 3.0619718309859154, "grad_norm": 2.72400164604187, "learning_rate": 1.629e-06, "loss": 2.7947, "step": 545 }, { "epoch": 3.067605633802817, "grad_norm": 2.1043014526367188, "learning_rate": 1.632e-06, "loss": 2.7753, "step": 546 }, { "epoch": 3.073239436619718, "grad_norm": 2.054772138595581, "learning_rate": 1.635e-06, "loss": 2.7834, "step": 547 }, { "epoch": 3.0788732394366196, "grad_norm": 2.433123826980591, "learning_rate": 1.638e-06, "loss": 2.7527, "step": 548 }, { "epoch": 3.084507042253521, "grad_norm": 4.187600135803223, "learning_rate": 1.641e-06, "loss": 2.7706, "step": 549 }, { "epoch": 3.0901408450704224, "grad_norm": 1.6656733751296997, "learning_rate": 1.644e-06, "loss": 2.7446, "step": 550 }, { "epoch": 3.095774647887324, "grad_norm": 1.757063388824463, "learning_rate": 1.6469999999999999e-06, "loss": 2.729, "step": 551 }, { "epoch": 3.101408450704225, "grad_norm": 2.2468183040618896, "learning_rate": 1.65e-06, "loss": 2.7629, "step": 552 }, { "epoch": 3.1070422535211266, "grad_norm": 2.3012444972991943, "learning_rate": 1.653e-06, "loss": 2.7679, "step": 553 }, { "epoch": 3.112676056338028, "grad_norm": 3.9080018997192383, "learning_rate": 1.6560000000000001e-06, "loss": 2.7438, "step": 554 }, { "epoch": 3.1183098591549294, "grad_norm": 2.3420796394348145, "learning_rate": 1.6590000000000001e-06, "loss": 2.7693, "step": 555 }, { "epoch": 3.123943661971831, "grad_norm": 3.211016893386841, "learning_rate": 1.662e-06, "loss": 2.7565, "step": 556 }, { "epoch": 3.129577464788732, "grad_norm": 4.163708209991455, "learning_rate": 1.6650000000000002e-06, "loss": 2.7658, "step": 557 }, { "epoch": 3.1352112676056336, "grad_norm": 3.493562698364258, "learning_rate": 1.668e-06, "loss": 2.7368, "step": 558 
}, { "epoch": 3.140845070422535, "grad_norm": 2.458523988723755, "learning_rate": 1.671e-06, "loss": 2.7076, "step": 559 }, { "epoch": 3.1464788732394364, "grad_norm": 3.26328182220459, "learning_rate": 1.6740000000000002e-06, "loss": 2.6912, "step": 560 }, { "epoch": 3.152112676056338, "grad_norm": 2.4104485511779785, "learning_rate": 1.677e-06, "loss": 2.7585, "step": 561 }, { "epoch": 3.1577464788732392, "grad_norm": 2.657015800476074, "learning_rate": 1.68e-06, "loss": 2.7099, "step": 562 }, { "epoch": 3.1633802816901406, "grad_norm": 2.3434484004974365, "learning_rate": 1.683e-06, "loss": 2.744, "step": 563 }, { "epoch": 3.169014084507042, "grad_norm": 3.666388988494873, "learning_rate": 1.686e-06, "loss": 2.7251, "step": 564 }, { "epoch": 3.1746478873239434, "grad_norm": 3.7017822265625, "learning_rate": 1.689e-06, "loss": 2.7218, "step": 565 }, { "epoch": 3.1802816901408453, "grad_norm": 6.779127597808838, "learning_rate": 1.692e-06, "loss": 2.7156, "step": 566 }, { "epoch": 3.1859154929577467, "grad_norm": 4.765157222747803, "learning_rate": 1.695e-06, "loss": 2.7168, "step": 567 }, { "epoch": 3.191549295774648, "grad_norm": 3.8683316707611084, "learning_rate": 1.6979999999999999e-06, "loss": 2.6945, "step": 568 }, { "epoch": 3.1971830985915495, "grad_norm": 2.9999983310699463, "learning_rate": 1.701e-06, "loss": 2.7323, "step": 569 }, { "epoch": 3.202816901408451, "grad_norm": 2.4626176357269287, "learning_rate": 1.7040000000000001e-06, "loss": 2.7142, "step": 570 }, { "epoch": 3.2084507042253523, "grad_norm": 4.122666358947754, "learning_rate": 1.707e-06, "loss": 2.6754, "step": 571 }, { "epoch": 3.2140845070422537, "grad_norm": 3.5713374614715576, "learning_rate": 1.7100000000000001e-06, "loss": 2.6513, "step": 572 }, { "epoch": 3.219718309859155, "grad_norm": 4.797949314117432, "learning_rate": 1.713e-06, "loss": 2.688, "step": 573 }, { "epoch": 3.2253521126760565, "grad_norm": 3.065605640411377, "learning_rate": 1.7160000000000002e-06, "loss": 2.6772, 
"step": 574 }, { "epoch": 3.230985915492958, "grad_norm": 2.238821029663086, "learning_rate": 1.719e-06, "loss": 2.6712, "step": 575 }, { "epoch": 3.2366197183098593, "grad_norm": 2.4874846935272217, "learning_rate": 1.722e-06, "loss": 2.7081, "step": 576 }, { "epoch": 3.2422535211267607, "grad_norm": 2.7824885845184326, "learning_rate": 1.7250000000000002e-06, "loss": 2.7113, "step": 577 }, { "epoch": 3.247887323943662, "grad_norm": 3.1023831367492676, "learning_rate": 1.728e-06, "loss": 2.6572, "step": 578 }, { "epoch": 3.2535211267605635, "grad_norm": 5.776034832000732, "learning_rate": 1.731e-06, "loss": 2.8611, "step": 579 }, { "epoch": 3.259154929577465, "grad_norm": 2.806821823120117, "learning_rate": 1.734e-06, "loss": 2.8481, "step": 580 }, { "epoch": 3.2647887323943663, "grad_norm": 3.0129337310791016, "learning_rate": 1.737e-06, "loss": 2.8037, "step": 581 }, { "epoch": 3.2704225352112677, "grad_norm": 5.805482387542725, "learning_rate": 1.74e-06, "loss": 2.8091, "step": 582 }, { "epoch": 3.276056338028169, "grad_norm": 9.397015571594238, "learning_rate": 1.743e-06, "loss": 2.8086, "step": 583 }, { "epoch": 3.2816901408450705, "grad_norm": 2.8330020904541016, "learning_rate": 1.746e-06, "loss": 2.7994, "step": 584 }, { "epoch": 3.287323943661972, "grad_norm": 3.208578109741211, "learning_rate": 1.749e-06, "loss": 2.7591, "step": 585 }, { "epoch": 3.2929577464788733, "grad_norm": 4.422690391540527, "learning_rate": 1.7520000000000001e-06, "loss": 2.7969, "step": 586 }, { "epoch": 3.2985915492957747, "grad_norm": 2.6905901432037354, "learning_rate": 1.7550000000000001e-06, "loss": 2.7611, "step": 587 }, { "epoch": 3.304225352112676, "grad_norm": 4.492880821228027, "learning_rate": 1.758e-06, "loss": 2.7692, "step": 588 }, { "epoch": 3.3098591549295775, "grad_norm": 2.1210246086120605, "learning_rate": 1.7610000000000002e-06, "loss": 2.7765, "step": 589 }, { "epoch": 3.315492957746479, "grad_norm": 2.7414491176605225, "learning_rate": 1.764e-06, "loss": 
2.7318, "step": 590 }, { "epoch": 3.3211267605633803, "grad_norm": 2.884840965270996, "learning_rate": 1.767e-06, "loss": 2.7482, "step": 591 }, { "epoch": 3.3267605633802817, "grad_norm": 3.144826650619507, "learning_rate": 1.77e-06, "loss": 2.7188, "step": 592 }, { "epoch": 3.332394366197183, "grad_norm": 2.618098020553589, "learning_rate": 1.773e-06, "loss": 2.7531, "step": 593 }, { "epoch": 3.3380281690140845, "grad_norm": 1.7363290786743164, "learning_rate": 1.776e-06, "loss": 2.7328, "step": 594 }, { "epoch": 3.343661971830986, "grad_norm": 3.7917160987854004, "learning_rate": 1.779e-06, "loss": 2.7158, "step": 595 }, { "epoch": 3.3492957746478873, "grad_norm": 3.086524248123169, "learning_rate": 1.782e-06, "loss": 2.7073, "step": 596 }, { "epoch": 3.3549295774647887, "grad_norm": 1.768385887145996, "learning_rate": 1.785e-06, "loss": 2.7167, "step": 597 }, { "epoch": 3.36056338028169, "grad_norm": 3.622553586959839, "learning_rate": 1.788e-06, "loss": 2.7171, "step": 598 }, { "epoch": 3.3661971830985915, "grad_norm": 2.9177920818328857, "learning_rate": 1.791e-06, "loss": 2.7344, "step": 599 }, { "epoch": 3.371830985915493, "grad_norm": 2.422119140625, "learning_rate": 1.794e-06, "loss": 2.6836, "step": 600 }, { "epoch": 3.3774647887323943, "grad_norm": 3.2521135807037354, "learning_rate": 1.7970000000000001e-06, "loss": 2.6564, "step": 601 }, { "epoch": 3.3830985915492957, "grad_norm": 3.1682698726654053, "learning_rate": 1.8e-06, "loss": 2.6881, "step": 602 }, { "epoch": 3.388732394366197, "grad_norm": 2.0565860271453857, "learning_rate": 1.8030000000000001e-06, "loss": 2.681, "step": 603 }, { "epoch": 3.3943661971830985, "grad_norm": 5.913877964019775, "learning_rate": 1.806e-06, "loss": 2.66, "step": 604 }, { "epoch": 3.4, "grad_norm": 5.101724624633789, "learning_rate": 1.809e-06, "loss": 2.7031, "step": 605 }, { "epoch": 3.4056338028169013, "grad_norm": 4.672321319580078, "learning_rate": 1.8120000000000002e-06, "loss": 2.6847, "step": 606 }, { 
"epoch": 3.4112676056338027, "grad_norm": 3.59171462059021, "learning_rate": 1.815e-06, "loss": 2.6721, "step": 607 }, { "epoch": 3.416901408450704, "grad_norm": 4.02622652053833, "learning_rate": 1.818e-06, "loss": 2.6493, "step": 608 }, { "epoch": 3.4225352112676055, "grad_norm": 6.657413959503174, "learning_rate": 1.821e-06, "loss": 2.6754, "step": 609 }, { "epoch": 3.428169014084507, "grad_norm": 6.219770908355713, "learning_rate": 1.824e-06, "loss": 2.6491, "step": 610 }, { "epoch": 3.4338028169014083, "grad_norm": 4.524576187133789, "learning_rate": 1.827e-06, "loss": 2.676, "step": 611 }, { "epoch": 3.4394366197183097, "grad_norm": 2.754516363143921, "learning_rate": 1.83e-06, "loss": 2.6256, "step": 612 }, { "epoch": 3.445070422535211, "grad_norm": 7.4485979080200195, "learning_rate": 1.833e-06, "loss": 2.6068, "step": 613 }, { "epoch": 3.4507042253521125, "grad_norm": 5.208416938781738, "learning_rate": 1.8359999999999999e-06, "loss": 2.6381, "step": 614 }, { "epoch": 3.456338028169014, "grad_norm": 5.324192047119141, "learning_rate": 1.839e-06, "loss": 2.6656, "step": 615 }, { "epoch": 3.4619718309859153, "grad_norm": 3.2816479206085205, "learning_rate": 1.8420000000000001e-06, "loss": 2.6474, "step": 616 }, { "epoch": 3.4676056338028167, "grad_norm": 7.767096519470215, "learning_rate": 1.8450000000000001e-06, "loss": 2.6936, "step": 617 }, { "epoch": 3.473239436619718, "grad_norm": 3.7725744247436523, "learning_rate": 1.8480000000000001e-06, "loss": 2.6174, "step": 618 }, { "epoch": 3.4788732394366195, "grad_norm": 5.0832905769348145, "learning_rate": 1.851e-06, "loss": 2.6354, "step": 619 }, { "epoch": 3.4845070422535214, "grad_norm": 5.80606746673584, "learning_rate": 1.8540000000000002e-06, "loss": 2.5779, "step": 620 }, { "epoch": 3.4901408450704228, "grad_norm": 3.8659050464630127, "learning_rate": 1.857e-06, "loss": 2.5956, "step": 621 }, { "epoch": 3.495774647887324, "grad_norm": 5.184934616088867, "learning_rate": 1.86e-06, "loss": 2.6341, 
"step": 622 }, { "epoch": 3.5014084507042256, "grad_norm": 4.191617488861084, "learning_rate": 1.8630000000000002e-06, "loss": 2.8809, "step": 623 }, { "epoch": 3.507042253521127, "grad_norm": 5.942683219909668, "learning_rate": 1.866e-06, "loss": 2.7936, "step": 624 }, { "epoch": 3.5126760563380284, "grad_norm": 3.73496413230896, "learning_rate": 1.869e-06, "loss": 2.7653, "step": 625 }, { "epoch": 3.5183098591549298, "grad_norm": 4.427114009857178, "learning_rate": 1.872e-06, "loss": 2.7579, "step": 626 }, { "epoch": 3.523943661971831, "grad_norm": 7.355522632598877, "learning_rate": 1.875e-06, "loss": 2.7391, "step": 627 }, { "epoch": 3.5295774647887326, "grad_norm": 5.933565616607666, "learning_rate": 1.878e-06, "loss": 2.7299, "step": 628 }, { "epoch": 3.535211267605634, "grad_norm": 2.801149845123291, "learning_rate": 1.8810000000000003e-06, "loss": 2.7234, "step": 629 }, { "epoch": 3.5408450704225354, "grad_norm": 3.7464067935943604, "learning_rate": 1.8839999999999999e-06, "loss": 2.7146, "step": 630 }, { "epoch": 3.546478873239437, "grad_norm": 4.548725605010986, "learning_rate": 1.8869999999999999e-06, "loss": 2.7071, "step": 631 }, { "epoch": 3.552112676056338, "grad_norm": 5.175673961639404, "learning_rate": 1.8900000000000001e-06, "loss": 2.7026, "step": 632 }, { "epoch": 3.5577464788732396, "grad_norm": 2.5320608615875244, "learning_rate": 1.8930000000000001e-06, "loss": 2.7358, "step": 633 }, { "epoch": 3.563380281690141, "grad_norm": 2.2658591270446777, "learning_rate": 1.8960000000000001e-06, "loss": 2.693, "step": 634 }, { "epoch": 3.5690140845070424, "grad_norm": 2.9923315048217773, "learning_rate": 1.899e-06, "loss": 2.6586, "step": 635 }, { "epoch": 3.574647887323944, "grad_norm": 1.8636302947998047, "learning_rate": 1.902e-06, "loss": 2.6489, "step": 636 }, { "epoch": 3.580281690140845, "grad_norm": 2.93691349029541, "learning_rate": 1.905e-06, "loss": 2.6609, "step": 637 }, { "epoch": 3.5859154929577466, "grad_norm": 3.5617032051086426, 
"learning_rate": 1.908e-06, "loss": 2.659, "step": 638 }, { "epoch": 3.591549295774648, "grad_norm": 3.435441732406616, "learning_rate": 1.9110000000000004e-06, "loss": 2.6154, "step": 639 }, { "epoch": 3.5971830985915494, "grad_norm": 4.196032524108887, "learning_rate": 1.914e-06, "loss": 2.616, "step": 640 }, { "epoch": 3.602816901408451, "grad_norm": 3.23492169380188, "learning_rate": 1.917e-06, "loss": 2.6048, "step": 641 }, { "epoch": 3.608450704225352, "grad_norm": 2.949294328689575, "learning_rate": 1.9200000000000003e-06, "loss": 2.598, "step": 642 }, { "epoch": 3.6140845070422536, "grad_norm": 2.844964027404785, "learning_rate": 1.923e-06, "loss": 2.6041, "step": 643 }, { "epoch": 3.619718309859155, "grad_norm": 3.5030791759490967, "learning_rate": 1.926e-06, "loss": 2.5685, "step": 644 }, { "epoch": 3.6253521126760564, "grad_norm": 10.395556449890137, "learning_rate": 1.929e-06, "loss": 2.576, "step": 645 }, { "epoch": 3.630985915492958, "grad_norm": 2.640410900115967, "learning_rate": 1.932e-06, "loss": 2.5623, "step": 646 }, { "epoch": 3.636619718309859, "grad_norm": 3.267350673675537, "learning_rate": 1.935e-06, "loss": 2.5574, "step": 647 }, { "epoch": 3.6422535211267606, "grad_norm": 9.421785354614258, "learning_rate": 1.9380000000000003e-06, "loss": 2.539, "step": 648 }, { "epoch": 3.647887323943662, "grad_norm": 5.227574825286865, "learning_rate": 1.9409999999999997e-06, "loss": 2.5801, "step": 649 }, { "epoch": 3.6535211267605634, "grad_norm": 2.5911121368408203, "learning_rate": 1.944e-06, "loss": 2.4754, "step": 650 }, { "epoch": 3.659154929577465, "grad_norm": 3.5224828720092773, "learning_rate": 1.947e-06, "loss": 2.5001, "step": 651 }, { "epoch": 3.664788732394366, "grad_norm": 4.783766269683838, "learning_rate": 1.95e-06, "loss": 2.5012, "step": 652 }, { "epoch": 3.6704225352112676, "grad_norm": 3.3553149700164795, "learning_rate": 1.953e-06, "loss": 2.531, "step": 653 }, { "epoch": 3.676056338028169, "grad_norm": 2.931241512298584, 
"learning_rate": 1.956e-06, "loss": 2.497, "step": 654 }, { "epoch": 3.6816901408450704, "grad_norm": 4.4694695472717285, "learning_rate": 1.959e-06, "loss": 2.4942, "step": 655 }, { "epoch": 3.687323943661972, "grad_norm": 3.122934103012085, "learning_rate": 1.962e-06, "loss": 2.4754, "step": 656 }, { "epoch": 3.692957746478873, "grad_norm": 6.193077087402344, "learning_rate": 1.9650000000000002e-06, "loss": 2.4694, "step": 657 }, { "epoch": 3.6985915492957746, "grad_norm": 2.414370059967041, "learning_rate": 1.968e-06, "loss": 2.4517, "step": 658 }, { "epoch": 3.704225352112676, "grad_norm": 3.34991717338562, "learning_rate": 1.971e-06, "loss": 2.4296, "step": 659 }, { "epoch": 3.7098591549295774, "grad_norm": 6.847357749938965, "learning_rate": 1.974e-06, "loss": 2.4321, "step": 660 }, { "epoch": 3.715492957746479, "grad_norm": 3.4056034088134766, "learning_rate": 1.977e-06, "loss": 2.4373, "step": 661 }, { "epoch": 3.72112676056338, "grad_norm": 2.5987486839294434, "learning_rate": 1.98e-06, "loss": 2.4231, "step": 662 }, { "epoch": 3.7267605633802816, "grad_norm": 2.899972915649414, "learning_rate": 1.9830000000000003e-06, "loss": 2.4102, "step": 663 }, { "epoch": 3.732394366197183, "grad_norm": 4.216456890106201, "learning_rate": 1.9859999999999997e-06, "loss": 2.4324, "step": 664 }, { "epoch": 3.7380281690140844, "grad_norm": 3.7976484298706055, "learning_rate": 1.989e-06, "loss": 2.3899, "step": 665 }, { "epoch": 3.743661971830986, "grad_norm": 13.57735538482666, "learning_rate": 1.992e-06, "loss": 2.3653, "step": 666 }, { "epoch": 3.749295774647887, "grad_norm": 5.536102771759033, "learning_rate": 1.995e-06, "loss": 2.6115, "step": 667 }, { "epoch": 3.7549295774647886, "grad_norm": 3.1471431255340576, "learning_rate": 1.998e-06, "loss": 2.6113, "step": 668 }, { "epoch": 3.76056338028169, "grad_norm": 2.3586504459381104, "learning_rate": 2.001e-06, "loss": 2.5347, "step": 669 }, { "epoch": 3.7661971830985914, "grad_norm": 3.342392683029175, "learning_rate": 
2.004e-06, "loss": 2.5331, "step": 670 }, { "epoch": 3.771830985915493, "grad_norm": 3.824932098388672, "learning_rate": 2.007e-06, "loss": 2.4711, "step": 671 }, { "epoch": 3.777464788732394, "grad_norm": 2.8359029293060303, "learning_rate": 2.0100000000000002e-06, "loss": 2.454, "step": 672 }, { "epoch": 3.7830985915492956, "grad_norm": 3.8589389324188232, "learning_rate": 2.0130000000000005e-06, "loss": 2.3785, "step": 673 }, { "epoch": 3.788732394366197, "grad_norm": 4.179661750793457, "learning_rate": 2.016e-06, "loss": 2.4114, "step": 674 }, { "epoch": 3.7943661971830984, "grad_norm": 4.384422302246094, "learning_rate": 2.019e-06, "loss": 2.399, "step": 675 }, { "epoch": 3.8, "grad_norm": 2.1641438007354736, "learning_rate": 2.0220000000000003e-06, "loss": 2.3519, "step": 676 }, { "epoch": 3.8056338028169012, "grad_norm": 4.078607082366943, "learning_rate": 2.025e-06, "loss": 2.3549, "step": 677 }, { "epoch": 3.8112676056338026, "grad_norm": 4.0506181716918945, "learning_rate": 2.028e-06, "loss": 2.3168, "step": 678 }, { "epoch": 3.816901408450704, "grad_norm": 3.3806586265563965, "learning_rate": 2.031e-06, "loss": 2.3383, "step": 679 }, { "epoch": 3.8225352112676054, "grad_norm": 2.962216854095459, "learning_rate": 2.034e-06, "loss": 2.2476, "step": 680 }, { "epoch": 3.828169014084507, "grad_norm": 2.2728936672210693, "learning_rate": 2.037e-06, "loss": 2.2852, "step": 681 }, { "epoch": 3.8338028169014082, "grad_norm": 2.4136433601379395, "learning_rate": 2.0400000000000004e-06, "loss": 2.2914, "step": 682 }, { "epoch": 3.8394366197183096, "grad_norm": 2.7648229598999023, "learning_rate": 2.0429999999999998e-06, "loss": 2.1966, "step": 683 }, { "epoch": 3.845070422535211, "grad_norm": 2.779562473297119, "learning_rate": 2.046e-06, "loss": 2.2622, "step": 684 }, { "epoch": 3.8507042253521124, "grad_norm": 2.7937114238739014, "learning_rate": 2.049e-06, "loss": 2.1844, "step": 685 }, { "epoch": 3.856338028169014, "grad_norm": 2.820657730102539, 
"learning_rate": 2.052e-06, "loss": 2.1237, "step": 686 }, { "epoch": 3.8619718309859152, "grad_norm": 3.204495668411255, "learning_rate": 2.0550000000000002e-06, "loss": 2.1909, "step": 687 }, { "epoch": 3.8676056338028166, "grad_norm": 3.271052598953247, "learning_rate": 2.058e-06, "loss": 2.1032, "step": 688 }, { "epoch": 3.873239436619718, "grad_norm": 3.531216621398926, "learning_rate": 2.061e-06, "loss": 2.144, "step": 689 }, { "epoch": 3.87887323943662, "grad_norm": 2.4491655826568604, "learning_rate": 2.064e-06, "loss": 2.0294, "step": 690 }, { "epoch": 3.8845070422535213, "grad_norm": 4.347051620483398, "learning_rate": 2.0670000000000003e-06, "loss": 2.0617, "step": 691 }, { "epoch": 3.8901408450704227, "grad_norm": 4.764668941497803, "learning_rate": 2.07e-06, "loss": 2.0337, "step": 692 }, { "epoch": 3.895774647887324, "grad_norm": 7.034329891204834, "learning_rate": 2.073e-06, "loss": 1.9692, "step": 693 }, { "epoch": 3.9014084507042255, "grad_norm": 2.9240710735321045, "learning_rate": 2.076e-06, "loss": 1.9619, "step": 694 }, { "epoch": 3.907042253521127, "grad_norm": 4.533735752105713, "learning_rate": 2.079e-06, "loss": 1.9579, "step": 695 }, { "epoch": 3.9126760563380283, "grad_norm": 2.520022392272949, "learning_rate": 2.082e-06, "loss": 2.0109, "step": 696 }, { "epoch": 3.9183098591549297, "grad_norm": 2.678165912628174, "learning_rate": 2.0850000000000004e-06, "loss": 1.9549, "step": 697 }, { "epoch": 3.923943661971831, "grad_norm": 3.2894513607025146, "learning_rate": 2.0879999999999997e-06, "loss": 1.8676, "step": 698 }, { "epoch": 3.9295774647887325, "grad_norm": 2.6271660327911377, "learning_rate": 2.091e-06, "loss": 1.9041, "step": 699 }, { "epoch": 3.935211267605634, "grad_norm": 4.272736549377441, "learning_rate": 2.094e-06, "loss": 1.8641, "step": 700 }, { "epoch": 3.9408450704225353, "grad_norm": 3.6323680877685547, "learning_rate": 2.097e-06, "loss": 1.7602, "step": 701 }, { "epoch": 3.9464788732394367, "grad_norm": 
3.2892682552337646, "learning_rate": 2.1000000000000002e-06, "loss": 1.8483, "step": 702 }, { "epoch": 3.952112676056338, "grad_norm": 3.96871018409729, "learning_rate": 2.103e-06, "loss": 1.7396, "step": 703 }, { "epoch": 3.9577464788732395, "grad_norm": 3.023526668548584, "learning_rate": 2.106e-06, "loss": 1.7653, "step": 704 }, { "epoch": 3.963380281690141, "grad_norm": 3.367448091506958, "learning_rate": 2.109e-06, "loss": 1.7519, "step": 705 }, { "epoch": 3.9690140845070423, "grad_norm": 3.7302796840667725, "learning_rate": 2.1120000000000003e-06, "loss": 1.6711, "step": 706 }, { "epoch": 3.9746478873239437, "grad_norm": 3.7805285453796387, "learning_rate": 2.1149999999999997e-06, "loss": 1.7193, "step": 707 }, { "epoch": 3.980281690140845, "grad_norm": 3.4666497707366943, "learning_rate": 2.118e-06, "loss": 1.669, "step": 708 }, { "epoch": 3.9859154929577465, "grad_norm": 4.103410243988037, "learning_rate": 2.121e-06, "loss": 1.7194, "step": 709 }, { "epoch": 3.991549295774648, "grad_norm": null, "learning_rate": 2.121e-06, "loss": 1.7002, "step": 710 }, { "epoch": 3.9971830985915493, "grad_norm": 5.339576244354248, "learning_rate": 2.124e-06, "loss": 1.8009, "step": 711 }, { "epoch": 4.0, "grad_norm": 6.8820624351501465, "learning_rate": 2.127e-06, "loss": 0.8197, "step": 712 }, { "epoch": 4.005633802816901, "grad_norm": 8.731258392333984, "learning_rate": 2.13e-06, "loss": 2.1075, "step": 713 }, { "epoch": 4.011267605633803, "grad_norm": 4.658092498779297, "learning_rate": 2.133e-06, "loss": 2.0035, "step": 714 }, { "epoch": 4.016901408450704, "grad_norm": 3.997706890106201, "learning_rate": 2.136e-06, "loss": 1.9042, "step": 715 }, { "epoch": 4.022535211267606, "grad_norm": 5.904595375061035, "learning_rate": 2.139e-06, "loss": 1.8859, "step": 716 }, { "epoch": 4.028169014084507, "grad_norm": 4.508280277252197, "learning_rate": 2.1420000000000004e-06, "loss": 1.7883, "step": 717 }, { "epoch": 4.033802816901408, "grad_norm": 3.468778371810913,
"learning_rate": 2.145e-06, "loss": 1.7626, "step": 718 }, { "epoch": 4.03943661971831, "grad_norm": 3.4685027599334717, "learning_rate": 2.148e-06, "loss": 1.7001, "step": 719 }, { "epoch": 4.045070422535211, "grad_norm": 3.296505928039551, "learning_rate": 2.1510000000000002e-06, "loss": 1.669, "step": 720 }, { "epoch": 4.050704225352113, "grad_norm": 4.874464988708496, "learning_rate": 2.154e-06, "loss": 1.705, "step": 721 }, { "epoch": 4.056338028169014, "grad_norm": 4.497097969055176, "learning_rate": 2.1570000000000003e-06, "loss": 1.6824, "step": 722 }, { "epoch": 4.061971830985915, "grad_norm": 3.7446484565734863, "learning_rate": 2.16e-06, "loss": 1.7157, "step": 723 }, { "epoch": 4.067605633802817, "grad_norm": 3.5548088550567627, "learning_rate": 2.163e-06, "loss": 1.6035, "step": 724 }, { "epoch": 4.073239436619718, "grad_norm": 3.665282964706421, "learning_rate": 2.166e-06, "loss": 1.5725, "step": 725 }, { "epoch": 4.07887323943662, "grad_norm": 3.388298749923706, "learning_rate": 2.1690000000000003e-06, "loss": 1.5237, "step": 726 }, { "epoch": 4.084507042253521, "grad_norm": 2.3221206665039062, "learning_rate": 2.172e-06, "loss": 1.5083, "step": 727 }, { "epoch": 4.090140845070422, "grad_norm": 14.295064926147461, "learning_rate": 2.175e-06, "loss": 1.4483, "step": 728 }, { "epoch": 4.095774647887324, "grad_norm": 4.429758548736572, "learning_rate": 2.178e-06, "loss": 1.4727, "step": 729 }, { "epoch": 4.101408450704225, "grad_norm": 4.3126959800720215, "learning_rate": 2.181e-06, "loss": 1.4766, "step": 730 }, { "epoch": 4.107042253521127, "grad_norm": 2.687000036239624, "learning_rate": 2.184e-06, "loss": 1.4647, "step": 731 }, { "epoch": 4.112676056338028, "grad_norm": 3.530883550643921, "learning_rate": 2.1870000000000004e-06, "loss": 1.4935, "step": 732 }, { "epoch": 4.118309859154929, "grad_norm": 4.200030326843262, "learning_rate": 2.1899999999999998e-06, "loss": 1.4652, "step": 733 }, { "epoch": 4.123943661971831, "grad_norm": 
7.423676013946533, "learning_rate": 2.193e-06, "loss": 1.3852, "step": 734 }, { "epoch": 4.129577464788732, "grad_norm": 2.8539798259735107, "learning_rate": 2.1960000000000002e-06, "loss": 1.3247, "step": 735 }, { "epoch": 4.135211267605634, "grad_norm": 3.968510627746582, "learning_rate": 2.199e-06, "loss": 1.2985, "step": 736 }, { "epoch": 4.140845070422535, "grad_norm": 2.972294807434082, "learning_rate": 2.2020000000000003e-06, "loss": 1.3183, "step": 737 }, { "epoch": 4.146478873239436, "grad_norm": 3.018853187561035, "learning_rate": 2.205e-06, "loss": 1.2738, "step": 738 }, { "epoch": 4.152112676056338, "grad_norm": 2.274994134902954, "learning_rate": 2.208e-06, "loss": 1.3257, "step": 739 }, { "epoch": 4.157746478873239, "grad_norm": 2.3417112827301025, "learning_rate": 2.211e-06, "loss": 1.2046, "step": 740 }, { "epoch": 4.163380281690141, "grad_norm": 3.256418228149414, "learning_rate": 2.2140000000000003e-06, "loss": 1.2625, "step": 741 }, { "epoch": 4.169014084507042, "grad_norm": 2.557704210281372, "learning_rate": 2.2169999999999997e-06, "loss": 1.2124, "step": 742 }, { "epoch": 4.174647887323943, "grad_norm": 3.7538537979125977, "learning_rate": 2.22e-06, "loss": 1.1492, "step": 743 }, { "epoch": 4.180281690140845, "grad_norm": 2.9859628677368164, "learning_rate": 2.223e-06, "loss": 1.2012, "step": 744 }, { "epoch": 4.185915492957746, "grad_norm": 3.2278072834014893, "learning_rate": 2.226e-06, "loss": 1.2356, "step": 745 }, { "epoch": 4.191549295774648, "grad_norm": 3.32592511177063, "learning_rate": 2.229e-06, "loss": 1.1824, "step": 746 }, { "epoch": 4.197183098591549, "grad_norm": 4.771190643310547, "learning_rate": 2.232e-06, "loss": 1.1087, "step": 747 }, { "epoch": 4.20281690140845, "grad_norm": 2.638829231262207, "learning_rate": 2.2349999999999998e-06, "loss": 1.1162, "step": 748 }, { "epoch": 4.208450704225352, "grad_norm": 6.826143264770508, "learning_rate": 2.238e-06, "loss": 1.1157, "step": 749 }, { "epoch": 4.214084507042253, 
"grad_norm": 3.0462405681610107, "learning_rate": 2.2410000000000002e-06, "loss": 1.0576, "step": 750 }, { "epoch": 4.219718309859155, "grad_norm": 4.8925909996032715, "learning_rate": 2.244e-06, "loss": 1.2062, "step": 751 }, { "epoch": 4.225352112676056, "grad_norm": 5.137844562530518, "learning_rate": 2.247e-06, "loss": 1.1099, "step": 752 }, { "epoch": 4.230985915492957, "grad_norm": 4.4226861000061035, "learning_rate": 2.25e-06, "loss": 1.0575, "step": 753 }, { "epoch": 4.236619718309859, "grad_norm": 8.942727088928223, "learning_rate": 2.253e-06, "loss": 1.0234, "step": 754 }, { "epoch": 4.24225352112676, "grad_norm": 5.536681652069092, "learning_rate": 2.256e-06, "loss": 1.0121, "step": 755 }, { "epoch": 4.247887323943662, "grad_norm": 5.978938102722168, "learning_rate": 2.2590000000000003e-06, "loss": 0.9913, "step": 756 }, { "epoch": 4.253521126760563, "grad_norm": 9.773181915283203, "learning_rate": 2.262e-06, "loss": 1.4302, "step": 757 }, { "epoch": 4.259154929577464, "grad_norm": 8.560917854309082, "learning_rate": 2.265e-06, "loss": 1.4571, "step": 758 }, { "epoch": 4.264788732394366, "grad_norm": 5.962185382843018, "learning_rate": 2.268e-06, "loss": 1.3484, "step": 759 }, { "epoch": 4.270422535211267, "grad_norm": 3.354548454284668, "learning_rate": 2.2710000000000004e-06, "loss": 1.2123, "step": 760 }, { "epoch": 4.276056338028169, "grad_norm": 3.654059410095215, "learning_rate": 2.274e-06, "loss": 1.3055, "step": 761 }, { "epoch": 4.28169014084507, "grad_norm": 4.993838787078857, "learning_rate": 2.277e-06, "loss": 1.3121, "step": 762 }, { "epoch": 4.2873239436619714, "grad_norm": 3.195439577102661, "learning_rate": 2.28e-06, "loss": 1.1986, "step": 763 }, { "epoch": 4.292957746478873, "grad_norm": 2.968320608139038, "learning_rate": 2.283e-06, "loss": 1.2231, "step": 764 }, { "epoch": 4.298591549295774, "grad_norm": 3.53824520111084, "learning_rate": 2.2860000000000002e-06, "loss": 1.2196, "step": 765 }, { "epoch": 4.304225352112676, "grad_norm": 
5.174617767333984, "learning_rate": 2.2890000000000004e-06, "loss": 1.1367, "step": 766 }, { "epoch": 4.309859154929577, "grad_norm": 8.613746643066406, "learning_rate": 2.292e-06, "loss": 1.0579, "step": 767 }, { "epoch": 4.3154929577464785, "grad_norm": 3.378936529159546, "learning_rate": 2.295e-06, "loss": 1.077, "step": 768 }, { "epoch": 4.321126760563381, "grad_norm": 3.4128663539886475, "learning_rate": 2.2980000000000003e-06, "loss": 1.0403, "step": 769 }, { "epoch": 4.326760563380281, "grad_norm": 4.564036846160889, "learning_rate": 2.301e-06, "loss": 1.0029, "step": 770 }, { "epoch": 4.3323943661971835, "grad_norm": 4.310069561004639, "learning_rate": 2.304e-06, "loss": 1.0173, "step": 771 }, { "epoch": 4.338028169014084, "grad_norm": 4.131349086761475, "learning_rate": 2.307e-06, "loss": 1.0233, "step": 772 }, { "epoch": 4.343661971830986, "grad_norm": 2.9398694038391113, "learning_rate": 2.31e-06, "loss": 0.9647, "step": 773 }, { "epoch": 4.349295774647887, "grad_norm": 2.1672401428222656, "learning_rate": 2.313e-06, "loss": 0.9424, "step": 774 }, { "epoch": 4.354929577464789, "grad_norm": 3.017409324645996, "learning_rate": 2.3160000000000004e-06, "loss": 0.9195, "step": 775 }, { "epoch": 4.3605633802816905, "grad_norm": 7.53600549697876, "learning_rate": 2.3189999999999997e-06, "loss": 0.9747, "step": 776 }, { "epoch": 4.366197183098592, "grad_norm": 4.035458564758301, "learning_rate": 2.322e-06, "loss": 0.9055, "step": 777 }, { "epoch": 4.371830985915493, "grad_norm": 2.679673671722412, "learning_rate": 2.325e-06, "loss": 0.911, "step": 778 }, { "epoch": 4.377464788732395, "grad_norm": 2.053769588470459, "learning_rate": 2.328e-06, "loss": 0.8975, "step": 779 }, { "epoch": 4.383098591549296, "grad_norm": 3.9188501834869385, "learning_rate": 2.3310000000000002e-06, "loss": 0.8792, "step": 780 }, { "epoch": 4.3887323943661976, "grad_norm": 2.785120725631714, "learning_rate": 2.334e-06, "loss": 0.8571, "step": 781 }, { "epoch": 4.394366197183099, 
"grad_norm": 2.4356346130371094, "learning_rate": 2.337e-06, "loss": 0.8674, "step": 782 }, { "epoch": 4.4, "grad_norm": 2.8814375400543213, "learning_rate": 2.34e-06, "loss": 0.8873, "step": 783 }, { "epoch": 4.405633802816902, "grad_norm": 5.882245063781738, "learning_rate": 2.3430000000000003e-06, "loss": 0.846, "step": 784 }, { "epoch": 4.411267605633803, "grad_norm": 4.481248378753662, "learning_rate": 2.346e-06, "loss": 0.7242, "step": 785 }, { "epoch": 4.416901408450705, "grad_norm": 3.376279592514038, "learning_rate": 2.349e-06, "loss": 0.8909, "step": 786 }, { "epoch": 4.422535211267606, "grad_norm": 5.917909622192383, "learning_rate": 2.352e-06, "loss": 0.8524, "step": 787 }, { "epoch": 4.428169014084507, "grad_norm": 2.9090683460235596, "learning_rate": 2.355e-06, "loss": 0.7465, "step": 788 }, { "epoch": 4.433802816901409, "grad_norm": 4.003406524658203, "learning_rate": 2.358e-06, "loss": 0.8354, "step": 789 }, { "epoch": 4.43943661971831, "grad_norm": 14.42676067352295, "learning_rate": 2.3610000000000003e-06, "loss": 0.7535, "step": 790 }, { "epoch": 4.445070422535212, "grad_norm": 3.3684017658233643, "learning_rate": 2.3639999999999997e-06, "loss": 0.8171, "step": 791 }, { "epoch": 4.450704225352113, "grad_norm": 3.4548377990722656, "learning_rate": 2.367e-06, "loss": 0.6947, "step": 792 }, { "epoch": 4.456338028169014, "grad_norm": 3.2732181549072266, "learning_rate": 2.37e-06, "loss": 0.7211, "step": 793 }, { "epoch": 4.461971830985916, "grad_norm": 3.2089741230010986, "learning_rate": 2.373e-06, "loss": 0.656, "step": 794 }, { "epoch": 4.467605633802817, "grad_norm": 3.5135998725891113, "learning_rate": 2.376e-06, "loss": 0.764, "step": 795 }, { "epoch": 4.473239436619719, "grad_norm": 2.727151393890381, "learning_rate": 2.379e-06, "loss": 0.6935, "step": 796 }, { "epoch": 4.47887323943662, "grad_norm": 6.873143196105957, "learning_rate": 2.382e-06, "loss": 0.6877, "step": 797 }, { "epoch": 4.484507042253521, "grad_norm": 3.446615695953369, 
"learning_rate": 2.385e-06, "loss": 0.6804, "step": 798 }, { "epoch": 4.490140845070423, "grad_norm": 5.030722618103027, "learning_rate": 2.3880000000000003e-06, "loss": 0.7444, "step": 799 }, { "epoch": 4.495774647887324, "grad_norm": 6.555557727813721, "learning_rate": 2.391e-06, "loss": 0.7532, "step": 800 }, { "epoch": 4.501408450704226, "grad_norm": 11.13542652130127, "learning_rate": 2.394e-06, "loss": 1.2519, "step": 801 }, { "epoch": 4.507042253521127, "grad_norm": 6.424833297729492, "learning_rate": 2.397e-06, "loss": 1.1184, "step": 802 }, { "epoch": 4.512676056338028, "grad_norm": 5.684906959533691, "learning_rate": 2.4000000000000003e-06, "loss": 1.1603, "step": 803 }, { "epoch": 4.51830985915493, "grad_norm": 6.032043933868408, "learning_rate": 2.403e-06, "loss": 0.9755, "step": 804 }, { "epoch": 4.523943661971831, "grad_norm": 7.522682189941406, "learning_rate": 2.406e-06, "loss": 1.0374, "step": 805 }, { "epoch": 4.529577464788733, "grad_norm": 4.559472560882568, "learning_rate": 2.409e-06, "loss": 0.963, "step": 806 }, { "epoch": 4.535211267605634, "grad_norm": 8.044770240783691, "learning_rate": 2.412e-06, "loss": 0.8639, "step": 807 }, { "epoch": 4.540845070422535, "grad_norm": 4.376523017883301, "learning_rate": 2.415e-06, "loss": 0.8814, "step": 808 }, { "epoch": 4.546478873239437, "grad_norm": 5.2792534828186035, "learning_rate": 2.4180000000000004e-06, "loss": 0.9314, "step": 809 }, { "epoch": 4.552112676056338, "grad_norm": 6.261806964874268, "learning_rate": 2.4209999999999998e-06, "loss": 0.8478, "step": 810 }, { "epoch": 4.55774647887324, "grad_norm": 5.189519882202148, "learning_rate": 2.424e-06, "loss": 0.9977, "step": 811 }, { "epoch": 4.563380281690141, "grad_norm": 5.815868854522705, "learning_rate": 2.4270000000000002e-06, "loss": 0.8007, "step": 812 }, { "epoch": 4.569014084507042, "grad_norm": 5.150207042694092, "learning_rate": 2.43e-06, "loss": 0.9216, "step": 813 }, { "epoch": 4.574647887323944, "grad_norm": 3.9940452575683594, 
"learning_rate": 2.4330000000000003e-06, "loss": 0.8655, "step": 814 }, { "epoch": 4.580281690140845, "grad_norm": 2.9555556774139404, "learning_rate": 2.436e-06, "loss": 0.8565, "step": 815 }, { "epoch": 4.585915492957747, "grad_norm": 3.326611280441284, "learning_rate": 2.439e-06, "loss": 0.7621, "step": 816 }, { "epoch": 4.591549295774648, "grad_norm": 3.882078170776367, "learning_rate": 2.442e-06, "loss": 0.7741, "step": 817 }, { "epoch": 4.597183098591549, "grad_norm": 11.13386344909668, "learning_rate": 2.4450000000000003e-06, "loss": 0.7245, "step": 818 }, { "epoch": 4.602816901408451, "grad_norm": 4.292423248291016, "learning_rate": 2.448e-06, "loss": 0.7295, "step": 819 }, { "epoch": 4.608450704225352, "grad_norm": 10.06823444366455, "learning_rate": 2.451e-06, "loss": 0.6212, "step": 820 }, { "epoch": 4.614084507042254, "grad_norm": 4.313129901885986, "learning_rate": 2.454e-06, "loss": 0.6961, "step": 821 }, { "epoch": 4.619718309859155, "grad_norm": 6.2645182609558105, "learning_rate": 2.457e-06, "loss": 0.7328, "step": 822 }, { "epoch": 4.625352112676056, "grad_norm": 2.457139015197754, "learning_rate": 2.46e-06, "loss": 0.7715, "step": 823 }, { "epoch": 4.630985915492958, "grad_norm": 3.2750065326690674, "learning_rate": 2.4630000000000004e-06, "loss": 0.7052, "step": 824 }, { "epoch": 4.636619718309859, "grad_norm": 4.161436557769775, "learning_rate": 2.4659999999999998e-06, "loss": 0.6182, "step": 825 }, { "epoch": 4.642253521126761, "grad_norm": 6.171960353851318, "learning_rate": 2.469e-06, "loss": 0.6678, "step": 826 }, { "epoch": 4.647887323943662, "grad_norm": 4.594521999359131, "learning_rate": 2.4720000000000002e-06, "loss": 0.6498, "step": 827 }, { "epoch": 4.653521126760563, "grad_norm": 5.677289009094238, "learning_rate": 2.475e-06, "loss": 0.6358, "step": 828 }, { "epoch": 4.659154929577465, "grad_norm": 7.616566181182861, "learning_rate": 2.4780000000000002e-06, "loss": 0.6303, "step": 829 }, { "epoch": 4.664788732394366, "grad_norm": 
3.6935417652130127, "learning_rate": 2.481e-06, "loss": 0.695, "step": 830 }, { "epoch": 4.670422535211268, "grad_norm": 6.680566310882568, "learning_rate": 2.484e-06, "loss": 0.6622, "step": 831 }, { "epoch": 4.676056338028169, "grad_norm": 4.233198165893555, "learning_rate": 2.487e-06, "loss": 0.5588, "step": 832 }, { "epoch": 4.68169014084507, "grad_norm": 2.316086769104004, "learning_rate": 2.4900000000000003e-06, "loss": 0.6056, "step": 833 }, { "epoch": 4.687323943661972, "grad_norm": 4.981112480163574, "learning_rate": 2.4929999999999997e-06, "loss": 0.615, "step": 834 }, { "epoch": 4.692957746478873, "grad_norm": 2.577280044555664, "learning_rate": 2.496e-06, "loss": 0.6051, "step": 835 }, { "epoch": 4.698591549295775, "grad_norm": 3.450861930847168, "learning_rate": 2.499e-06, "loss": 0.6528, "step": 836 }, { "epoch": 4.704225352112676, "grad_norm": 2.7186598777770996, "learning_rate": 2.502e-06, "loss": 0.6306, "step": 837 }, { "epoch": 4.709859154929577, "grad_norm": 2.859384298324585, "learning_rate": 2.505e-06, "loss": 0.6805, "step": 838 }, { "epoch": 4.715492957746479, "grad_norm": 3.7891767024993896, "learning_rate": 2.508e-06, "loss": 0.6214, "step": 839 }, { "epoch": 4.72112676056338, "grad_norm": 3.8955564498901367, "learning_rate": 2.5109999999999998e-06, "loss": 0.5786, "step": 840 }, { "epoch": 4.726760563380282, "grad_norm": 6.762790203094482, "learning_rate": 2.514e-06, "loss": 0.5551, "step": 841 }, { "epoch": 4.732394366197183, "grad_norm": 3.186650276184082, "learning_rate": 2.517e-06, "loss": 0.5718, "step": 842 }, { "epoch": 4.738028169014084, "grad_norm": 4.856934547424316, "learning_rate": 2.52e-06, "loss": 0.5954, "step": 843 }, { "epoch": 4.743661971830986, "grad_norm": 5.6527791023254395, "learning_rate": 2.523e-06, "loss": 0.6891, "step": 844 }, { "epoch": 4.749295774647887, "grad_norm": 6.431458473205566, "learning_rate": 2.526e-06, "loss": 1.0238, "step": 845 }, { "epoch": 4.754929577464789, "grad_norm": 3.19467830657959, 
"learning_rate": 2.5290000000000003e-06, "loss": 0.9253, "step": 846 }, { "epoch": 4.76056338028169, "grad_norm": 3.688904285430908, "learning_rate": 2.532e-06, "loss": 0.8768, "step": 847 }, { "epoch": 4.766197183098591, "grad_norm": 3.788217067718506, "learning_rate": 2.5350000000000003e-06, "loss": 0.9304, "step": 848 }, { "epoch": 4.771830985915493, "grad_norm": 3.510573625564575, "learning_rate": 2.538e-06, "loss": 0.8295, "step": 849 }, { "epoch": 4.777464788732394, "grad_norm": 3.672971248626709, "learning_rate": 2.541e-06, "loss": 0.8227, "step": 850 }, { "epoch": 4.783098591549296, "grad_norm": 9.765240669250488, "learning_rate": 2.544e-06, "loss": 0.8331, "step": 851 }, { "epoch": 4.788732394366197, "grad_norm": 4.7542924880981445, "learning_rate": 2.5470000000000003e-06, "loss": 0.8178, "step": 852 }, { "epoch": 4.794366197183098, "grad_norm": 2.981187105178833, "learning_rate": 2.55e-06, "loss": 0.8533, "step": 853 }, { "epoch": 4.8, "grad_norm": 5.025519847869873, "learning_rate": 2.553e-06, "loss": 0.8352, "step": 854 }, { "epoch": 4.805633802816901, "grad_norm": 3.1089770793914795, "learning_rate": 2.556e-06, "loss": 0.7018, "step": 855 }, { "epoch": 4.811267605633803, "grad_norm": 2.9379541873931885, "learning_rate": 2.559e-06, "loss": 0.7346, "step": 856 }, { "epoch": 4.816901408450704, "grad_norm": 3.395998001098633, "learning_rate": 2.562e-06, "loss": 0.7934, "step": 857 }, { "epoch": 4.822535211267605, "grad_norm": 3.98203182220459, "learning_rate": 2.5650000000000004e-06, "loss": 0.7462, "step": 858 }, { "epoch": 4.828169014084507, "grad_norm": 2.7102365493774414, "learning_rate": 2.568e-06, "loss": 0.6715, "step": 859 }, { "epoch": 4.833802816901408, "grad_norm": 6.256322860717773, "learning_rate": 2.571e-06, "loss": 0.8342, "step": 860 }, { "epoch": 4.83943661971831, "grad_norm": 2.3925929069519043, "learning_rate": 2.5740000000000003e-06, "loss": 0.6078, "step": 861 }, { "epoch": 4.845070422535211, "grad_norm": 2.342928647994995, 
"learning_rate": 2.577e-06, "loss": 0.6536, "step": 862 }, { "epoch": 4.850704225352112, "grad_norm": 2.7884795665740967, "learning_rate": 2.58e-06, "loss": 0.6163, "step": 863 }, { "epoch": 4.856338028169014, "grad_norm": 2.60550856590271, "learning_rate": 2.583e-06, "loss": 0.6322, "step": 864 }, { "epoch": 4.861971830985915, "grad_norm": 3.2051239013671875, "learning_rate": 2.586e-06, "loss": 0.5733, "step": 865 }, { "epoch": 4.867605633802817, "grad_norm": 2.349964141845703, "learning_rate": 2.589e-06, "loss": 0.5361, "step": 866 }, { "epoch": 4.873239436619718, "grad_norm": 7.408186435699463, "learning_rate": 2.5920000000000003e-06, "loss": 0.6847, "step": 867 }, { "epoch": 4.878873239436619, "grad_norm": 108.75530242919922, "learning_rate": 2.5949999999999997e-06, "loss": 0.5442, "step": 868 }, { "epoch": 4.884507042253521, "grad_norm": 3.8446199893951416, "learning_rate": 2.598e-06, "loss": 0.6033, "step": 869 }, { "epoch": 4.890140845070422, "grad_norm": 2.7630155086517334, "learning_rate": 2.601e-06, "loss": 0.5898, "step": 870 }, { "epoch": 4.895774647887324, "grad_norm": 2.9433813095092773, "learning_rate": 2.604e-06, "loss": 0.6187, "step": 871 }, { "epoch": 4.901408450704225, "grad_norm": 3.2574610710144043, "learning_rate": 2.607e-06, "loss": 0.4792, "step": 872 }, { "epoch": 4.907042253521126, "grad_norm": 2.240130662918091, "learning_rate": 2.61e-06, "loss": 0.4657, "step": 873 }, { "epoch": 4.912676056338028, "grad_norm": 3.9430322647094727, "learning_rate": 2.613e-06, "loss": 0.6533, "step": 874 }, { "epoch": 4.918309859154929, "grad_norm": 2.186457633972168, "learning_rate": 2.616e-06, "loss": 0.4592, "step": 875 }, { "epoch": 4.923943661971831, "grad_norm": 6.3133955001831055, "learning_rate": 2.6190000000000003e-06, "loss": 0.6135, "step": 876 }, { "epoch": 4.929577464788732, "grad_norm": 2.4026143550872803, "learning_rate": 2.622e-06, "loss": 0.6044, "step": 877 }, { "epoch": 4.9352112676056334, "grad_norm": 2.5374703407287598, 
"learning_rate": 2.625e-06, "loss": 0.5245, "step": 878 }, { "epoch": 4.940845070422535, "grad_norm": 2.3137059211730957, "learning_rate": 2.628e-06, "loss": 0.6356, "step": 879 }, { "epoch": 4.946478873239436, "grad_norm": 3.0200490951538086, "learning_rate": 2.631e-06, "loss": 0.5608, "step": 880 }, { "epoch": 4.952112676056338, "grad_norm": 2.407966375350952, "learning_rate": 2.634e-06, "loss": 0.5017, "step": 881 }, { "epoch": 4.957746478873239, "grad_norm": 4.552791118621826, "learning_rate": 2.6370000000000003e-06, "loss": 0.5556, "step": 882 }, { "epoch": 4.9633802816901404, "grad_norm": 2.422546863555908, "learning_rate": 2.6399999999999997e-06, "loss": 0.4539, "step": 883 }, { "epoch": 4.969014084507043, "grad_norm": 2.9516642093658447, "learning_rate": 2.643e-06, "loss": 0.5157, "step": 884 }, { "epoch": 4.974647887323943, "grad_norm": 3.9323694705963135, "learning_rate": 2.646e-06, "loss": 0.5258, "step": 885 }, { "epoch": 4.9802816901408455, "grad_norm": 4.032447338104248, "learning_rate": 2.649e-06, "loss": 0.6014, "step": 886 }, { "epoch": 4.985915492957746, "grad_norm": 4.838427543640137, "learning_rate": 2.652e-06, "loss": 0.4939, "step": 887 }, { "epoch": 4.991549295774648, "grad_norm": 7.549025535583496, "learning_rate": 2.655e-06, "loss": 0.5463, "step": 888 }, { "epoch": 4.997183098591549, "grad_norm": 2.98043155670166, "learning_rate": 2.6580000000000002e-06, "loss": 0.6438, "step": 889 }, { "epoch": 5.0, "grad_norm": 3.0263454914093018, "learning_rate": 2.661e-06, "loss": 0.2365, "step": 890 }, { "epoch": 5.005633802816901, "grad_norm": 3.7286951541900635, "learning_rate": 2.6640000000000002e-06, "loss": 0.8897, "step": 891 }, { "epoch": 5.011267605633803, "grad_norm": 3.884514570236206, "learning_rate": 2.6670000000000005e-06, "loss": 0.7991, "step": 892 }, { "epoch": 5.016901408450704, "grad_norm": 3.591808319091797, "learning_rate": 2.67e-06, "loss": 0.7984, "step": 893 }, { "epoch": 5.022535211267606, "grad_norm": 2.895336866378784, 
"learning_rate": 2.673e-06, "loss": 0.8905, "step": 894 }, { "epoch": 5.028169014084507, "grad_norm": 4.619030952453613, "learning_rate": 2.6760000000000003e-06, "loss": 0.7814, "step": 895 }, { "epoch": 5.033802816901408, "grad_norm": 3.87629771232605, "learning_rate": 2.679e-06, "loss": 0.7244, "step": 896 }, { "epoch": 5.03943661971831, "grad_norm": 3.3396666049957275, "learning_rate": 2.682e-06, "loss": 0.681, "step": 897 }, { "epoch": 5.045070422535211, "grad_norm": 4.312685489654541, "learning_rate": 2.685e-06, "loss": 0.6109, "step": 898 }, { "epoch": 5.050704225352113, "grad_norm": 3.3607585430145264, "learning_rate": 2.688e-06, "loss": 0.6679, "step": 899 }, { "epoch": 5.056338028169014, "grad_norm": 4.933312892913818, "learning_rate": 2.691e-06, "loss": 0.6408, "step": 900 }, { "epoch": 5.061971830985915, "grad_norm": 7.953579425811768, "learning_rate": 2.6940000000000004e-06, "loss": 0.704, "step": 901 }, { "epoch": 5.067605633802817, "grad_norm": 2.636009931564331, "learning_rate": 2.6969999999999998e-06, "loss": 0.6687, "step": 902 }, { "epoch": 5.073239436619718, "grad_norm": 2.501701831817627, "learning_rate": 2.7e-06, "loss": 0.6241, "step": 903 }, { "epoch": 5.07887323943662, "grad_norm": 3.6819188594818115, "learning_rate": 2.703e-06, "loss": 0.7167, "step": 904 }, { "epoch": 5.084507042253521, "grad_norm": 2.071209192276001, "learning_rate": 2.706e-06, "loss": 0.5678, "step": 905 }, { "epoch": 5.090140845070422, "grad_norm": 2.5370724201202393, "learning_rate": 2.7090000000000002e-06, "loss": 0.6386, "step": 906 }, { "epoch": 5.095774647887324, "grad_norm": 5.760916233062744, "learning_rate": 2.712e-06, "loss": 0.6145, "step": 907 }, { "epoch": 5.101408450704225, "grad_norm": 2.104323148727417, "learning_rate": 2.715e-06, "loss": 0.5532, "step": 908 }, { "epoch": 5.107042253521127, "grad_norm": 2.2896480560302734, "learning_rate": 2.718e-06, "loss": 0.5573, "step": 909 }, { "epoch": 5.112676056338028, "grad_norm": 4.796744346618652, 
"learning_rate": 2.7210000000000003e-06, "loss": 0.4646, "step": 910 }, { "epoch": 5.118309859154929, "grad_norm": 2.22436785697937, "learning_rate": 2.724e-06, "loss": 0.5938, "step": 911 }, { "epoch": 5.123943661971831, "grad_norm": 5.623243808746338, "learning_rate": 2.727e-06, "loss": 0.5198, "step": 912 }, { "epoch": 5.129577464788732, "grad_norm": 2.2928428649902344, "learning_rate": 2.73e-06, "loss": 0.5933, "step": 913 }, { "epoch": 5.135211267605634, "grad_norm": 3.24739408493042, "learning_rate": 2.733e-06, "loss": 0.4768, "step": 914 }, { "epoch": 5.140845070422535, "grad_norm": 5.295236110687256, "learning_rate": 2.736e-06, "loss": 0.5845, "step": 915 }, { "epoch": 5.146478873239436, "grad_norm": 5.906332015991211, "learning_rate": 2.7390000000000004e-06, "loss": 0.536, "step": 916 }, { "epoch": 5.152112676056338, "grad_norm": 2.0323636531829834, "learning_rate": 2.7419999999999998e-06, "loss": 0.598, "step": 917 }, { "epoch": 5.157746478873239, "grad_norm": 2.347740650177002, "learning_rate": 2.745e-06, "loss": 0.4687, "step": 918 }, { "epoch": 5.163380281690141, "grad_norm": 5.7692999839782715, "learning_rate": 2.748e-06, "loss": 0.4441, "step": 919 }, { "epoch": 5.169014084507042, "grad_norm": 2.8650996685028076, "learning_rate": 2.751e-06, "loss": 0.6155, "step": 920 }, { "epoch": 5.174647887323943, "grad_norm": 2.7199292182922363, "learning_rate": 2.7540000000000002e-06, "loss": 0.5006, "step": 921 }, { "epoch": 5.180281690140845, "grad_norm": 2.181318521499634, "learning_rate": 2.757e-06, "loss": 0.4766, "step": 922 }, { "epoch": 5.185915492957746, "grad_norm": 2.543658971786499, "learning_rate": 2.76e-06, "loss": 0.5543, "step": 923 }, { "epoch": 5.191549295774648, "grad_norm": 7.329205513000488, "learning_rate": 2.763e-06, "loss": 0.4265, "step": 924 }, { "epoch": 5.197183098591549, "grad_norm": 2.5346627235412598, "learning_rate": 2.7660000000000003e-06, "loss": 0.4808, "step": 925 }, { "epoch": 5.20281690140845, "grad_norm": 2.384472370147705, 
"learning_rate": 2.7689999999999997e-06, "loss": 0.4395, "step": 926 }, { "epoch": 5.208450704225352, "grad_norm": 3.4939169883728027, "learning_rate": 2.772e-06, "loss": 0.4119, "step": 927 }, { "epoch": 5.214084507042253, "grad_norm": 5.23539924621582, "learning_rate": 2.775e-06, "loss": 0.4431, "step": 928 }, { "epoch": 5.219718309859155, "grad_norm": 2.3811333179473877, "learning_rate": 2.778e-06, "loss": 0.4725, "step": 929 }, { "epoch": 5.225352112676056, "grad_norm": 6.978809356689453, "learning_rate": 2.781e-06, "loss": 0.4696, "step": 930 }, { "epoch": 5.230985915492957, "grad_norm": 5.870415687561035, "learning_rate": 2.784e-06, "loss": 0.381, "step": 931 }, { "epoch": 5.236619718309859, "grad_norm": 4.045934677124023, "learning_rate": 2.787e-06, "loss": 0.6097, "step": 932 }, { "epoch": 5.24225352112676, "grad_norm": 21.88624382019043, "learning_rate": 2.79e-06, "loss": 0.3777, "step": 933 }, { "epoch": 5.247887323943662, "grad_norm": 3.4917380809783936, "learning_rate": 2.793e-06, "loss": 0.523, "step": 934 }, { "epoch": 5.253521126760563, "grad_norm": 2.8486669063568115, "learning_rate": 2.7960000000000004e-06, "loss": 0.7939, "step": 935 }, { "epoch": 5.259154929577464, "grad_norm": 3.822283983230591, "learning_rate": 2.799e-06, "loss": 0.8049, "step": 936 }, { "epoch": 5.264788732394366, "grad_norm": 3.4523532390594482, "learning_rate": 2.802e-06, "loss": 0.7975, "step": 937 }, { "epoch": 5.270422535211267, "grad_norm": 2.5025782585144043, "learning_rate": 2.8050000000000002e-06, "loss": 0.6506, "step": 938 }, { "epoch": 5.276056338028169, "grad_norm": 2.210494041442871, "learning_rate": 2.808e-06, "loss": 0.6887, "step": 939 }, { "epoch": 5.28169014084507, "grad_norm": 2.1525866985321045, "learning_rate": 2.8110000000000003e-06, "loss": 0.7456, "step": 940 }, { "epoch": 5.2873239436619714, "grad_norm": 6.6180338859558105, "learning_rate": 2.814e-06, "loss": 0.6736, "step": 941 }, { "epoch": 5.292957746478873, "grad_norm": 2.8995516300201416, 
"learning_rate": 2.817e-06, "loss": 0.6474, "step": 942 }, { "epoch": 5.298591549295774, "grad_norm": 2.2757349014282227, "learning_rate": 2.82e-06, "loss": 0.6859, "step": 943 }, { "epoch": 5.304225352112676, "grad_norm": 2.54069185256958, "learning_rate": 2.8230000000000003e-06, "loss": 0.5716, "step": 944 }, { "epoch": 5.309859154929577, "grad_norm": 3.2585108280181885, "learning_rate": 2.826e-06, "loss": 0.5769, "step": 945 }, { "epoch": 5.3154929577464785, "grad_norm": 2.3954012393951416, "learning_rate": 2.829e-06, "loss": 0.5716, "step": 946 }, { "epoch": 5.321126760563381, "grad_norm": 2.4710357189178467, "learning_rate": 2.832e-06, "loss": 0.5858, "step": 947 }, { "epoch": 5.326760563380281, "grad_norm": 2.4542315006256104, "learning_rate": 2.835e-06, "loss": 0.5595, "step": 948 }, { "epoch": 5.3323943661971835, "grad_norm": 4.040088176727295, "learning_rate": 2.838e-06, "loss": 0.4993, "step": 949 }, { "epoch": 5.338028169014084, "grad_norm": 2.6751508712768555, "learning_rate": 2.8410000000000004e-06, "loss": 0.5521, "step": 950 }, { "epoch": 5.343661971830986, "grad_norm": 1.8362388610839844, "learning_rate": 2.844e-06, "loss": 0.4828, "step": 951 }, { "epoch": 5.349295774647887, "grad_norm": 3.870666980743408, "learning_rate": 2.847e-06, "loss": 0.5741, "step": 952 }, { "epoch": 5.354929577464789, "grad_norm": 2.98319411277771, "learning_rate": 2.8500000000000002e-06, "loss": 0.4582, "step": 953 }, { "epoch": 5.3605633802816905, "grad_norm": 2.085663080215454, "learning_rate": 2.853e-06, "loss": 0.4475, "step": 954 }, { "epoch": 5.366197183098592, "grad_norm": 2.988067626953125, "learning_rate": 2.8560000000000003e-06, "loss": 0.5461, "step": 955 }, { "epoch": 5.371830985915493, "grad_norm": 2.501213312149048, "learning_rate": 2.859e-06, "loss": 0.5009, "step": 956 }, { "epoch": 5.377464788732395, "grad_norm": 5.369483947753906, "learning_rate": 2.862e-06, "loss": 0.4742, "step": 957 }, { "epoch": 5.383098591549296, "grad_norm": 3.1307785511016846, 
"learning_rate": 2.865e-06, "loss": 0.4614, "step": 958 }, { "epoch": 5.3887323943661976, "grad_norm": 2.3280138969421387, "learning_rate": 2.8680000000000003e-06, "loss": 0.4165, "step": 959 }, { "epoch": 5.394366197183099, "grad_norm": 3.9419243335723877, "learning_rate": 2.8709999999999997e-06, "loss": 0.4646, "step": 960 }, { "epoch": 5.4, "grad_norm": 1.6847516298294067, "learning_rate": 2.874e-06, "loss": 0.4, "step": 961 }, { "epoch": 5.405633802816902, "grad_norm": 2.56376576423645, "learning_rate": 2.877e-06, "loss": 0.4022, "step": 962 }, { "epoch": 5.411267605633803, "grad_norm": 2.88046932220459, "learning_rate": 2.88e-06, "loss": 0.4373, "step": 963 }, { "epoch": 5.416901408450705, "grad_norm": 2.5140233039855957, "learning_rate": 2.883e-06, "loss": 0.476, "step": 964 }, { "epoch": 5.422535211267606, "grad_norm": 3.400726079940796, "learning_rate": 2.886e-06, "loss": 0.3556, "step": 965 }, { "epoch": 5.428169014084507, "grad_norm": 2.35408091545105, "learning_rate": 2.8889999999999998e-06, "loss": 0.3892, "step": 966 }, { "epoch": 5.433802816901409, "grad_norm": 2.032426357269287, "learning_rate": 2.892e-06, "loss": 0.4039, "step": 967 }, { "epoch": 5.43943661971831, "grad_norm": 2.5404016971588135, "learning_rate": 2.8950000000000002e-06, "loss": 0.3942, "step": 968 }, { "epoch": 5.445070422535212, "grad_norm": 2.5330841541290283, "learning_rate": 2.898e-06, "loss": 0.3797, "step": 969 }, { "epoch": 5.450704225352113, "grad_norm": 6.841852188110352, "learning_rate": 2.901e-06, "loss": 0.4562, "step": 970 }, { "epoch": 5.456338028169014, "grad_norm": 2.308804988861084, "learning_rate": 2.904e-06, "loss": 0.5075, "step": 971 }, { "epoch": 5.461971830985916, "grad_norm": 3.835463523864746, "learning_rate": 2.907e-06, "loss": 0.3515, "step": 972 }, { "epoch": 5.467605633802817, "grad_norm": 8.132750511169434, "learning_rate": 2.91e-06, "loss": 0.4574, "step": 973 }, { "epoch": 5.473239436619719, "grad_norm": 2.29156756401062, "learning_rate": 
2.9130000000000003e-06, "loss": 0.3985, "step": 974 }, { "epoch": 5.47887323943662, "grad_norm": 3.6257643699645996, "learning_rate": 2.916e-06, "loss": 0.4139, "step": 975 }, { "epoch": 5.484507042253521, "grad_norm": 2.5936882495880127, "learning_rate": 2.919e-06, "loss": 0.4021, "step": 976 }, { "epoch": 5.490140845070423, "grad_norm": 3.8019628524780273, "learning_rate": 2.922e-06, "loss": 0.476, "step": 977 }, { "epoch": 5.495774647887324, "grad_norm": 4.725766658782959, "learning_rate": 2.9250000000000004e-06, "loss": 0.4575, "step": 978 }, { "epoch": 5.501408450704226, "grad_norm": 5.978580951690674, "learning_rate": 2.928e-06, "loss": 0.8134, "step": 979 }, { "epoch": 5.507042253521127, "grad_norm": 2.873779773712158, "learning_rate": 2.931e-06, "loss": 0.74, "step": 980 }, { "epoch": 5.512676056338028, "grad_norm": 2.560568332672119, "learning_rate": 2.934e-06, "loss": 0.7099, "step": 981 }, { "epoch": 5.51830985915493, "grad_norm": 2.4004578590393066, "learning_rate": 2.937e-06, "loss": 0.6652, "step": 982 }, { "epoch": 5.523943661971831, "grad_norm": 3.904620409011841, "learning_rate": 2.9400000000000002e-06, "loss": 0.7448, "step": 983 }, { "epoch": 5.529577464788733, "grad_norm": 1.8310728073120117, "learning_rate": 2.9430000000000005e-06, "loss": 0.6161, "step": 984 }, { "epoch": 5.535211267605634, "grad_norm": 8.018976211547852, "learning_rate": 2.946e-06, "loss": 0.5445, "step": 985 }, { "epoch": 5.540845070422535, "grad_norm": 2.1463332176208496, "learning_rate": 2.949e-06, "loss": 0.6716, "step": 986 }, { "epoch": 5.546478873239437, "grad_norm": 2.329373359680176, "learning_rate": 2.9520000000000003e-06, "loss": 0.5893, "step": 987 }, { "epoch": 5.552112676056338, "grad_norm": 2.2067439556121826, "learning_rate": 2.955e-06, "loss": 0.5582, "step": 988 }, { "epoch": 5.55774647887324, "grad_norm": 2.535163164138794, "learning_rate": 2.958e-06, "loss": 0.6627, "step": 989 }, { "epoch": 5.563380281690141, "grad_norm": 2.639993190765381, 
"learning_rate": 2.961e-06, "loss": 0.4677, "step": 990 }, { "epoch": 5.569014084507042, "grad_norm": 2.374748945236206, "learning_rate": 2.964e-06, "loss": 0.5139, "step": 991 }, { "epoch": 5.574647887323944, "grad_norm": 7.6541314125061035, "learning_rate": 2.967e-06, "loss": 0.4792, "step": 992 }, { "epoch": 5.580281690140845, "grad_norm": 1.8786181211471558, "learning_rate": 2.9700000000000004e-06, "loss": 0.4939, "step": 993 }, { "epoch": 5.585915492957747, "grad_norm": 2.8175878524780273, "learning_rate": 2.9729999999999997e-06, "loss": 0.5425, "step": 994 }, { "epoch": 5.591549295774648, "grad_norm": 4.114474773406982, "learning_rate": 2.976e-06, "loss": 0.4222, "step": 995 }, { "epoch": 5.597183098591549, "grad_norm": 7.6624860763549805, "learning_rate": 2.979e-06, "loss": 0.4513, "step": 996 }, { "epoch": 5.602816901408451, "grad_norm": 1.9664232730865479, "learning_rate": 2.982e-06, "loss": 0.4046, "step": 997 }, { "epoch": 5.608450704225352, "grad_norm": 2.2487237453460693, "learning_rate": 2.9850000000000002e-06, "loss": 0.4825, "step": 998 }, { "epoch": 5.614084507042254, "grad_norm": 2.0302340984344482, "learning_rate": 2.988e-06, "loss": 0.5438, "step": 999 }, { "epoch": 5.619718309859155, "grad_norm": 2.2003347873687744, "learning_rate": 2.991e-06, "loss": 0.4609, "step": 1000 }, { "epoch": 5.619718309859155, "eval_cer": 0.1952717940741893, "eval_loss": 0.7326682209968567, "eval_runtime": 17.4904, "eval_samples_per_second": 17.381, "eval_steps_per_second": 0.572, "eval_wer": 0.6745970836531082, "step": 1000 }, { "epoch": 5.625352112676056, "grad_norm": 1.9740064144134521, "learning_rate": 2.994e-06, "loss": 0.4549, "step": 1001 }, { "epoch": 5.630985915492958, "grad_norm": 3.390360116958618, "learning_rate": 2.9970000000000003e-06, "loss": 0.4063, "step": 1002 }, { "epoch": 5.636619718309859, "grad_norm": 3.5713820457458496, "learning_rate": 3e-06, "loss": 0.3822, "step": 1003 }, { "epoch": 5.642253521126761, "grad_norm": 1.9873765707015991, 
"learning_rate": 3.003e-06, "loss": 0.4433, "step": 1004 }, { "epoch": 5.647887323943662, "grad_norm": 2.8761491775512695, "learning_rate": 3.006e-06, "loss": 0.4648, "step": 1005 }, { "epoch": 5.653521126760563, "grad_norm": 1.7977397441864014, "learning_rate": 3.009e-06, "loss": 0.3642, "step": 1006 }, { "epoch": 5.659154929577465, "grad_norm": 3.600468158721924, "learning_rate": 3.012e-06, "loss": 0.4131, "step": 1007 }, { "epoch": 5.664788732394366, "grad_norm": 3.388821840286255, "learning_rate": 3.0150000000000004e-06, "loss": 0.425, "step": 1008 }, { "epoch": 5.670422535211268, "grad_norm": 2.8823421001434326, "learning_rate": 3.0179999999999997e-06, "loss": 0.3208, "step": 1009 }, { "epoch": 5.676056338028169, "grad_norm": 3.7964141368865967, "learning_rate": 3.021e-06, "loss": 0.3602, "step": 1010 }, { "epoch": 5.68169014084507, "grad_norm": 2.2682909965515137, "learning_rate": 3.024e-06, "loss": 0.371, "step": 1011 }, { "epoch": 5.687323943661972, "grad_norm": 3.282222270965576, "learning_rate": 3.027e-06, "loss": 0.4358, "step": 1012 }, { "epoch": 5.692957746478873, "grad_norm": 3.3907766342163086, "learning_rate": 3.0300000000000002e-06, "loss": 0.3991, "step": 1013 }, { "epoch": 5.698591549295775, "grad_norm": 3.360041379928589, "learning_rate": 3.033e-06, "loss": 0.4788, "step": 1014 }, { "epoch": 5.704225352112676, "grad_norm": 2.692408561706543, "learning_rate": 3.036e-06, "loss": 0.3784, "step": 1015 }, { "epoch": 5.709859154929577, "grad_norm": 4.024622440338135, "learning_rate": 3.039e-06, "loss": 0.3587, "step": 1016 }, { "epoch": 5.715492957746479, "grad_norm": 2.869748115539551, "learning_rate": 3.0420000000000003e-06, "loss": 0.3586, "step": 1017 }, { "epoch": 5.72112676056338, "grad_norm": 5.108041763305664, "learning_rate": 3.0450000000000005e-06, "loss": 0.363, "step": 1018 }, { "epoch": 5.726760563380282, "grad_norm": 2.0021114349365234, "learning_rate": 3.048e-06, "loss": 0.3183, "step": 1019 }, { "epoch": 5.732394366197183, "grad_norm": 
2.743074893951416, "learning_rate": 3.051e-06, "loss": 0.298, "step": 1020 }, { "epoch": 5.738028169014084, "grad_norm": 4.111669063568115, "learning_rate": 3.0540000000000003e-06, "loss": 0.3195, "step": 1021 }, { "epoch": 5.743661971830986, "grad_norm": 4.282331943511963, "learning_rate": 3.057e-06, "loss": 0.3188, "step": 1022 }, { "epoch": 5.749295774647887, "grad_norm": 4.695259094238281, "learning_rate": 3.06e-06, "loss": 0.8676, "step": 1023 }, { "epoch": 5.754929577464789, "grad_norm": 2.486351728439331, "learning_rate": 3.063e-06, "loss": 0.7056, "step": 1024 }, { "epoch": 5.76056338028169, "grad_norm": 3.260338068008423, "learning_rate": 3.066e-06, "loss": 0.7452, "step": 1025 }, { "epoch": 5.766197183098591, "grad_norm": 4.008510112762451, "learning_rate": 3.069e-06, "loss": 0.735, "step": 1026 }, { "epoch": 5.771830985915493, "grad_norm": 3.9532523155212402, "learning_rate": 3.0720000000000004e-06, "loss": 0.61, "step": 1027 }, { "epoch": 5.777464788732394, "grad_norm": 3.0018396377563477, "learning_rate": 3.0749999999999998e-06, "loss": 0.6123, "step": 1028 }, { "epoch": 5.783098591549296, "grad_norm": 2.110142469406128, "learning_rate": 3.078e-06, "loss": 0.6005, "step": 1029 }, { "epoch": 5.788732394366197, "grad_norm": 2.5541582107543945, "learning_rate": 3.0810000000000002e-06, "loss": 0.6066, "step": 1030 }, { "epoch": 5.794366197183098, "grad_norm": 2.268420934677124, "learning_rate": 3.084e-06, "loss": 0.5635, "step": 1031 }, { "epoch": 5.8, "grad_norm": 5.7473039627075195, "learning_rate": 3.0870000000000003e-06, "loss": 0.6541, "step": 1032 }, { "epoch": 5.805633802816901, "grad_norm": 2.6688060760498047, "learning_rate": 3.09e-06, "loss": 0.524, "step": 1033 }, { "epoch": 5.811267605633803, "grad_norm": 2.1956958770751953, "learning_rate": 3.093e-06, "loss": 0.5255, "step": 1034 }, { "epoch": 5.816901408450704, "grad_norm": 13.65053653717041, "learning_rate": 3.096e-06, "loss": 0.5631, "step": 1035 }, { "epoch": 5.822535211267605, 
"grad_norm": 3.543541431427002, "learning_rate": 3.0990000000000003e-06, "loss": 0.4932, "step": 1036 }, { "epoch": 5.828169014084507, "grad_norm": 2.932461977005005, "learning_rate": 3.102e-06, "loss": 0.5264, "step": 1037 }, { "epoch": 5.833802816901408, "grad_norm": 1.9047507047653198, "learning_rate": 3.105e-06, "loss": 0.5509, "step": 1038 }, { "epoch": 5.83943661971831, "grad_norm": 1.764613389968872, "learning_rate": 3.108e-06, "loss": 0.4645, "step": 1039 }, { "epoch": 5.845070422535211, "grad_norm": 1.8868327140808105, "learning_rate": 3.111e-06, "loss": 0.471, "step": 1040 }, { "epoch": 5.850704225352112, "grad_norm": 2.461768388748169, "learning_rate": 3.114e-06, "loss": 0.4921, "step": 1041 }, { "epoch": 5.856338028169014, "grad_norm": 2.245302200317383, "learning_rate": 3.1170000000000004e-06, "loss": 0.3832, "step": 1042 }, { "epoch": 5.861971830985915, "grad_norm": 2.2288429737091064, "learning_rate": 3.1199999999999998e-06, "loss": 0.4716, "step": 1043 }, { "epoch": 5.867605633802817, "grad_norm": 2.7764408588409424, "learning_rate": 3.123e-06, "loss": 0.3596, "step": 1044 }, { "epoch": 5.873239436619718, "grad_norm": 2.285722017288208, "learning_rate": 3.1260000000000002e-06, "loss": 0.4281, "step": 1045 }, { "epoch": 5.878873239436619, "grad_norm": 2.382336378097534, "learning_rate": 3.129e-06, "loss": 0.4693, "step": 1046 }, { "epoch": 5.884507042253521, "grad_norm": 3.444363832473755, "learning_rate": 3.1320000000000003e-06, "loss": 0.3467, "step": 1047 }, { "epoch": 5.890140845070422, "grad_norm": 1.9805412292480469, "learning_rate": 3.135e-06, "loss": 0.4168, "step": 1048 }, { "epoch": 5.895774647887324, "grad_norm": 1.9371024370193481, "learning_rate": 3.138e-06, "loss": 0.3567, "step": 1049 }, { "epoch": 5.901408450704225, "grad_norm": 15.120765686035156, "learning_rate": 3.141e-06, "loss": 0.3586, "step": 1050 }, { "epoch": 5.907042253521126, "grad_norm": 3.0226802825927734, "learning_rate": 3.1440000000000003e-06, "loss": 0.2934, "step": 
1051 }, { "epoch": 5.912676056338028, "grad_norm": 5.908453941345215, "learning_rate": 3.1469999999999997e-06, "loss": 0.3278, "step": 1052 }, { "epoch": 5.918309859154929, "grad_norm": 2.0846734046936035, "learning_rate": 3.15e-06, "loss": 0.3469, "step": 1053 }, { "epoch": 5.923943661971831, "grad_norm": 2.7666261196136475, "learning_rate": 3.153e-06, "loss": 0.4525, "step": 1054 }, { "epoch": 5.929577464788732, "grad_norm": 2.7881827354431152, "learning_rate": 3.156e-06, "loss": 0.4293, "step": 1055 }, { "epoch": 5.9352112676056334, "grad_norm": 11.611705780029297, "learning_rate": 3.159e-06, "loss": 0.5026, "step": 1056 }, { "epoch": 5.940845070422535, "grad_norm": 2.3607640266418457, "learning_rate": 3.162e-06, "loss": 0.3474, "step": 1057 }, { "epoch": 5.946478873239436, "grad_norm": 2.2724802494049072, "learning_rate": 3.1649999999999998e-06, "loss": 0.2915, "step": 1058 }, { "epoch": 5.952112676056338, "grad_norm": 2.3175947666168213, "learning_rate": 3.168e-06, "loss": 0.3747, "step": 1059 }, { "epoch": 5.957746478873239, "grad_norm": 4.291808128356934, "learning_rate": 3.1710000000000002e-06, "loss": 0.3298, "step": 1060 }, { "epoch": 5.9633802816901404, "grad_norm": 3.045258045196533, "learning_rate": 3.1740000000000004e-06, "loss": 0.3207, "step": 1061 }, { "epoch": 5.969014084507043, "grad_norm": 12.683424949645996, "learning_rate": 3.177e-06, "loss": 0.3784, "step": 1062 }, { "epoch": 5.974647887323943, "grad_norm": 2.6699745655059814, "learning_rate": 3.18e-06, "loss": 0.3789, "step": 1063 }, { "epoch": 5.9802816901408455, "grad_norm": 2.9239308834075928, "learning_rate": 3.1830000000000003e-06, "loss": 0.3064, "step": 1064 }, { "epoch": 5.985915492957746, "grad_norm": 4.623414993286133, "learning_rate": 3.186e-06, "loss": 0.3555, "step": 1065 }, { "epoch": 5.991549295774648, "grad_norm": 4.558917999267578, "learning_rate": 3.1890000000000003e-06, "loss": 0.3352, "step": 1066 }, { "epoch": 5.997183098591549, "grad_norm": 2.830547332763672, 
"learning_rate": 3.192e-06, "loss": 0.4056, "step": 1067 }, { "epoch": 6.0, "grad_norm": 2.560009241104126, "learning_rate": 3.195e-06, "loss": 0.1316, "step": 1068 }, { "epoch": 6.005633802816901, "grad_norm": 2.8757383823394775, "learning_rate": 3.198e-06, "loss": 0.7323, "step": 1069 }, { "epoch": 6.011267605633803, "grad_norm": 4.248073577880859, "learning_rate": 3.2010000000000004e-06, "loss": 0.6524, "step": 1070 }, { "epoch": 6.016901408450704, "grad_norm": 3.0824227333068848, "learning_rate": 3.204e-06, "loss": 0.627, "step": 1071 }, { "epoch": 6.022535211267606, "grad_norm": 2.5614023208618164, "learning_rate": 3.207e-06, "loss": 0.5918, "step": 1072 }, { "epoch": 6.028169014084507, "grad_norm": 1.8798631429672241, "learning_rate": 3.21e-06, "loss": 0.6242, "step": 1073 }, { "epoch": 6.033802816901408, "grad_norm": 2.9014124870300293, "learning_rate": 3.213e-06, "loss": 0.5471, "step": 1074 }, { "epoch": 6.03943661971831, "grad_norm": 2.8454902172088623, "learning_rate": 3.216e-06, "loss": 0.5102, "step": 1075 }, { "epoch": 6.045070422535211, "grad_norm": 2.481560707092285, "learning_rate": 3.2190000000000004e-06, "loss": 0.4679, "step": 1076 }, { "epoch": 6.050704225352113, "grad_norm": 2.235304117202759, "learning_rate": 3.222e-06, "loss": 0.5686, "step": 1077 }, { "epoch": 6.056338028169014, "grad_norm": 2.3935916423797607, "learning_rate": 3.225e-06, "loss": 0.4733, "step": 1078 }, { "epoch": 6.061971830985915, "grad_norm": 2.3465304374694824, "learning_rate": 3.2280000000000003e-06, "loss": 0.4731, "step": 1079 }, { "epoch": 6.067605633802817, "grad_norm": 9.101483345031738, "learning_rate": 3.231e-06, "loss": 0.5751, "step": 1080 }, { "epoch": 6.073239436619718, "grad_norm": 2.8640172481536865, "learning_rate": 3.2340000000000003e-06, "loss": 0.4777, "step": 1081 }, { "epoch": 6.07887323943662, "grad_norm": 2.607933759689331, "learning_rate": 3.237e-06, "loss": 0.5388, "step": 1082 }, { "epoch": 6.084507042253521, "grad_norm": 2.7978246212005615, 
"learning_rate": 3.24e-06, "loss": 0.4931, "step": 1083 }, { "epoch": 6.090140845070422, "grad_norm": 2.214951753616333, "learning_rate": 3.243e-06, "loss": 0.4927, "step": 1084 }, { "epoch": 6.095774647887324, "grad_norm": 2.930983543395996, "learning_rate": 3.2460000000000003e-06, "loss": 0.5953, "step": 1085 }, { "epoch": 6.101408450704225, "grad_norm": 4.329953193664551, "learning_rate": 3.2489999999999997e-06, "loss": 0.3811, "step": 1086 }, { "epoch": 6.107042253521127, "grad_norm": 2.5319600105285645, "learning_rate": 3.252e-06, "loss": 0.4729, "step": 1087 }, { "epoch": 6.112676056338028, "grad_norm": 2.1831231117248535, "learning_rate": 3.255e-06, "loss": 0.3801, "step": 1088 }, { "epoch": 6.118309859154929, "grad_norm": 2.469048500061035, "learning_rate": 3.258e-06, "loss": 0.4558, "step": 1089 }, { "epoch": 6.123943661971831, "grad_norm": 2.3994851112365723, "learning_rate": 3.261e-06, "loss": 0.3516, "step": 1090 }, { "epoch": 6.129577464788732, "grad_norm": 1.8267234563827515, "learning_rate": 3.264e-06, "loss": 0.4166, "step": 1091 }, { "epoch": 6.135211267605634, "grad_norm": 2.312739133834839, "learning_rate": 3.267e-06, "loss": 0.3884, "step": 1092 }, { "epoch": 6.140845070422535, "grad_norm": 2.5899667739868164, "learning_rate": 3.27e-06, "loss": 0.3147, "step": 1093 }, { "epoch": 6.146478873239436, "grad_norm": 2.8650119304656982, "learning_rate": 3.2730000000000003e-06, "loss": 0.4618, "step": 1094 }, { "epoch": 6.152112676056338, "grad_norm": 4.954709529876709, "learning_rate": 3.276e-06, "loss": 0.3718, "step": 1095 }, { "epoch": 6.157746478873239, "grad_norm": 3.2749619483947754, "learning_rate": 3.279e-06, "loss": 0.2865, "step": 1096 }, { "epoch": 6.163380281690141, "grad_norm": 3.2736098766326904, "learning_rate": 3.282e-06, "loss": 0.4207, "step": 1097 }, { "epoch": 6.169014084507042, "grad_norm": 2.515049934387207, "learning_rate": 3.285e-06, "loss": 0.4335, "step": 1098 }, { "epoch": 6.174647887323943, "grad_norm": 1.8884328603744507, 
"learning_rate": 3.288e-06, "loss": 0.3095, "step": 1099 }, { "epoch": 6.180281690140845, "grad_norm": 2.9493768215179443, "learning_rate": 3.2910000000000003e-06, "loss": 0.3148, "step": 1100 }, { "epoch": 6.185915492957746, "grad_norm": 7.112857818603516, "learning_rate": 3.2939999999999997e-06, "loss": 0.2604, "step": 1101 }, { "epoch": 6.191549295774648, "grad_norm": 2.131307363510132, "learning_rate": 3.297e-06, "loss": 0.325, "step": 1102 }, { "epoch": 6.197183098591549, "grad_norm": 2.433164119720459, "learning_rate": 3.3e-06, "loss": 0.3075, "step": 1103 }, { "epoch": 6.20281690140845, "grad_norm": 2.7520127296447754, "learning_rate": 3.3030000000000004e-06, "loss": 0.3609, "step": 1104 }, { "epoch": 6.208450704225352, "grad_norm": 1.7646235227584839, "learning_rate": 3.306e-06, "loss": 0.3364, "step": 1105 }, { "epoch": 6.214084507042253, "grad_norm": 1.751688003540039, "learning_rate": 3.309e-06, "loss": 0.3657, "step": 1106 }, { "epoch": 6.219718309859155, "grad_norm": 2.7968740463256836, "learning_rate": 3.3120000000000002e-06, "loss": 0.3604, "step": 1107 }, { "epoch": 6.225352112676056, "grad_norm": 5.964559555053711, "learning_rate": 3.315e-06, "loss": 0.3176, "step": 1108 }, { "epoch": 6.230985915492957, "grad_norm": 2.9316728115081787, "learning_rate": 3.3180000000000003e-06, "loss": 0.3361, "step": 1109 }, { "epoch": 6.236619718309859, "grad_norm": 2.7882907390594482, "learning_rate": 3.3210000000000005e-06, "loss": 0.3378, "step": 1110 }, { "epoch": 6.24225352112676, "grad_norm": 4.655933856964111, "learning_rate": 3.324e-06, "loss": 0.33, "step": 1111 }, { "epoch": 6.247887323943662, "grad_norm": 2.307751178741455, "learning_rate": 3.327e-06, "loss": 0.252, "step": 1112 }, { "epoch": 6.253521126760563, "grad_norm": 5.10330057144165, "learning_rate": 3.3300000000000003e-06, "loss": 0.7075, "step": 1113 }, { "epoch": 6.259154929577464, "grad_norm": 3.3552329540252686, "learning_rate": 3.333e-06, "loss": 0.5958, "step": 1114 }, { "epoch": 
6.264788732394366, "grad_norm": 3.4242026805877686, "learning_rate": 3.336e-06, "loss": 0.5177, "step": 1115 }, { "epoch": 6.270422535211267, "grad_norm": 2.5406289100646973, "learning_rate": 3.339e-06, "loss": 0.6254, "step": 1116 }, { "epoch": 6.276056338028169, "grad_norm": 2.3175554275512695, "learning_rate": 3.342e-06, "loss": 0.5877, "step": 1117 }, { "epoch": 6.28169014084507, "grad_norm": 1.9484251737594604, "learning_rate": 3.345e-06, "loss": 0.4614, "step": 1118 }, { "epoch": 6.2873239436619714, "grad_norm": 2.1169261932373047, "learning_rate": 3.3480000000000004e-06, "loss": 0.5378, "step": 1119 }, { "epoch": 6.292957746478873, "grad_norm": 2.1809768676757812, "learning_rate": 3.3509999999999998e-06, "loss": 0.4658, "step": 1120 }, { "epoch": 6.298591549295774, "grad_norm": 2.5709948539733887, "learning_rate": 3.354e-06, "loss": 0.5651, "step": 1121 }, { "epoch": 6.304225352112676, "grad_norm": 1.767427682876587, "learning_rate": 3.3570000000000002e-06, "loss": 0.4431, "step": 1122 }, { "epoch": 6.309859154929577, "grad_norm": 2.072402000427246, "learning_rate": 3.36e-06, "loss": 0.5124, "step": 1123 }, { "epoch": 6.3154929577464785, "grad_norm": 2.190497636795044, "learning_rate": 3.3630000000000002e-06, "loss": 0.4368, "step": 1124 }, { "epoch": 6.321126760563381, "grad_norm": 2.1287357807159424, "learning_rate": 3.366e-06, "loss": 0.449, "step": 1125 }, { "epoch": 6.326760563380281, "grad_norm": 2.1951074600219727, "learning_rate": 3.369e-06, "loss": 0.4802, "step": 1126 }, { "epoch": 6.3323943661971835, "grad_norm": 2.4533660411834717, "learning_rate": 3.372e-06, "loss": 0.4826, "step": 1127 }, { "epoch": 6.338028169014084, "grad_norm": 1.8029705286026, "learning_rate": 3.3750000000000003e-06, "loss": 0.4092, "step": 1128 }, { "epoch": 6.343661971830986, "grad_norm": 1.9082602262496948, "learning_rate": 3.378e-06, "loss": 0.3362, "step": 1129 }, { "epoch": 6.349295774647887, "grad_norm": 2.352720260620117, "learning_rate": 3.381e-06, "loss": 0.4837, 
"step": 1130 }, { "epoch": 6.354929577464789, "grad_norm": 2.1292738914489746, "learning_rate": 3.384e-06, "loss": 0.3537, "step": 1131 }, { "epoch": 6.3605633802816905, "grad_norm": 2.459183692932129, "learning_rate": 3.387e-06, "loss": 0.3538, "step": 1132 }, { "epoch": 6.366197183098592, "grad_norm": 2.447366237640381, "learning_rate": 3.39e-06, "loss": 0.355, "step": 1133 }, { "epoch": 6.371830985915493, "grad_norm": 4.108922004699707, "learning_rate": 3.3930000000000004e-06, "loss": 0.3766, "step": 1134 }, { "epoch": 6.377464788732395, "grad_norm": 2.0675840377807617, "learning_rate": 3.3959999999999998e-06, "loss": 0.4792, "step": 1135 }, { "epoch": 6.383098591549296, "grad_norm": 3.7002816200256348, "learning_rate": 3.399e-06, "loss": 0.3849, "step": 1136 }, { "epoch": 6.3887323943661976, "grad_norm": 2.943373441696167, "learning_rate": 3.402e-06, "loss": 0.4129, "step": 1137 }, { "epoch": 6.394366197183099, "grad_norm": 1.837854266166687, "learning_rate": 3.405e-06, "loss": 0.3353, "step": 1138 }, { "epoch": 6.4, "grad_norm": 1.5776747465133667, "learning_rate": 3.4080000000000002e-06, "loss": 0.3492, "step": 1139 }, { "epoch": 6.405633802816902, "grad_norm": 1.4969834089279175, "learning_rate": 3.411e-06, "loss": 0.2672, "step": 1140 }, { "epoch": 6.411267605633803, "grad_norm": 1.4635839462280273, "learning_rate": 3.414e-06, "loss": 0.2685, "step": 1141 }, { "epoch": 6.416901408450705, "grad_norm": 2.5149481296539307, "learning_rate": 3.417e-06, "loss": 0.367, "step": 1142 }, { "epoch": 6.422535211267606, "grad_norm": 2.035217046737671, "learning_rate": 3.4200000000000003e-06, "loss": 0.2472, "step": 1143 }, { "epoch": 6.428169014084507, "grad_norm": 2.5293378829956055, "learning_rate": 3.4229999999999997e-06, "loss": 0.3348, "step": 1144 }, { "epoch": 6.433802816901409, "grad_norm": 2.428393840789795, "learning_rate": 3.426e-06, "loss": 0.2959, "step": 1145 }, { "epoch": 6.43943661971831, "grad_norm": 2.4428305625915527, "learning_rate": 3.429e-06, 
"loss": 0.3045, "step": 1146 }, { "epoch": 6.445070422535212, "grad_norm": 1.8863011598587036, "learning_rate": 3.4320000000000003e-06, "loss": 0.2704, "step": 1147 }, { "epoch": 6.450704225352113, "grad_norm": 2.2741410732269287, "learning_rate": 3.435e-06, "loss": 0.3013, "step": 1148 }, { "epoch": 6.456338028169014, "grad_norm": 2.163501501083374, "learning_rate": 3.438e-06, "loss": 0.2811, "step": 1149 }, { "epoch": 6.461971830985916, "grad_norm": 2.008511543273926, "learning_rate": 3.441e-06, "loss": 0.2719, "step": 1150 }, { "epoch": 6.467605633802817, "grad_norm": 2.1432340145111084, "learning_rate": 3.444e-06, "loss": 0.2997, "step": 1151 }, { "epoch": 6.473239436619719, "grad_norm": 2.3122198581695557, "learning_rate": 3.447e-06, "loss": 0.3962, "step": 1152 }, { "epoch": 6.47887323943662, "grad_norm": 2.081256628036499, "learning_rate": 3.4500000000000004e-06, "loss": 0.2956, "step": 1153 }, { "epoch": 6.484507042253521, "grad_norm": 1.8960202932357788, "learning_rate": 3.453e-06, "loss": 0.2381, "step": 1154 }, { "epoch": 6.490140845070423, "grad_norm": 2.7031049728393555, "learning_rate": 3.456e-06, "loss": 0.3322, "step": 1155 }, { "epoch": 6.495774647887324, "grad_norm": 2.5131025314331055, "learning_rate": 3.4590000000000003e-06, "loss": 0.3824, "step": 1156 }, { "epoch": 6.501408450704226, "grad_norm": 6.784069061279297, "learning_rate": 3.462e-06, "loss": 0.7116, "step": 1157 }, { "epoch": 6.507042253521127, "grad_norm": 2.2185564041137695, "learning_rate": 3.4650000000000003e-06, "loss": 0.5711, "step": 1158 }, { "epoch": 6.512676056338028, "grad_norm": 4.720303535461426, "learning_rate": 3.468e-06, "loss": 0.649, "step": 1159 }, { "epoch": 6.51830985915493, "grad_norm": 2.771867036819458, "learning_rate": 3.471e-06, "loss": 0.5, "step": 1160 }, { "epoch": 6.523943661971831, "grad_norm": 3.789430618286133, "learning_rate": 3.474e-06, "loss": 0.5229, "step": 1161 }, { "epoch": 6.529577464788733, "grad_norm": 2.8106529712677, "learning_rate": 
3.4770000000000003e-06, "loss": 0.5269, "step": 1162 }, { "epoch": 6.535211267605634, "grad_norm": 4.296682357788086, "learning_rate": 3.48e-06, "loss": 0.5168, "step": 1163 }, { "epoch": 6.540845070422535, "grad_norm": 1.9336687326431274, "learning_rate": 3.483e-06, "loss": 0.5049, "step": 1164 }, { "epoch": 6.546478873239437, "grad_norm": 2.550758123397827, "learning_rate": 3.486e-06, "loss": 0.4953, "step": 1165 }, { "epoch": 6.552112676056338, "grad_norm": 21.155317306518555, "learning_rate": 3.489e-06, "loss": 0.4319, "step": 1166 }, { "epoch": 6.55774647887324, "grad_norm": 2.251248359680176, "learning_rate": 3.492e-06, "loss": 0.391, "step": 1167 }, { "epoch": 6.563380281690141, "grad_norm": 2.381747245788574, "learning_rate": 3.4950000000000004e-06, "loss": 0.47, "step": 1168 }, { "epoch": 6.569014084507042, "grad_norm": 2.979938268661499, "learning_rate": 3.498e-06, "loss": 0.3958, "step": 1169 }, { "epoch": 6.574647887323944, "grad_norm": 1.639154314994812, "learning_rate": 3.501e-06, "loss": 0.4365, "step": 1170 }, { "epoch": 6.580281690140845, "grad_norm": 2.7278690338134766, "learning_rate": 3.5040000000000002e-06, "loss": 0.3803, "step": 1171 }, { "epoch": 6.585915492957747, "grad_norm": 1.96522855758667, "learning_rate": 3.507e-06, "loss": 0.4474, "step": 1172 }, { "epoch": 6.591549295774648, "grad_norm": 2.044724225997925, "learning_rate": 3.5100000000000003e-06, "loss": 0.2917, "step": 1173 }, { "epoch": 6.597183098591549, "grad_norm": 2.6340885162353516, "learning_rate": 3.513e-06, "loss": 0.3838, "step": 1174 }, { "epoch": 6.602816901408451, "grad_norm": 2.6529457569122314, "learning_rate": 3.516e-06, "loss": 0.363, "step": 1175 }, { "epoch": 6.608450704225352, "grad_norm": 1.9991381168365479, "learning_rate": 3.519e-06, "loss": 0.3632, "step": 1176 }, { "epoch": 6.614084507042254, "grad_norm": 1.672012448310852, "learning_rate": 3.5220000000000003e-06, "loss": 0.3172, "step": 1177 }, { "epoch": 6.619718309859155, "grad_norm": 2.4515981674194336, 
"learning_rate": 3.5249999999999997e-06, "loss": 0.4061, "step": 1178 }, { "epoch": 6.625352112676056, "grad_norm": 2.483896017074585, "learning_rate": 3.528e-06, "loss": 0.3704, "step": 1179 }, { "epoch": 6.630985915492958, "grad_norm": 1.8844904899597168, "learning_rate": 3.531e-06, "loss": 0.2842, "step": 1180 }, { "epoch": 6.636619718309859, "grad_norm": 2.03861665725708, "learning_rate": 3.534e-06, "loss": 0.2909, "step": 1181 }, { "epoch": 6.642253521126761, "grad_norm": 1.7440897226333618, "learning_rate": 3.537e-06, "loss": 0.3144, "step": 1182 }, { "epoch": 6.647887323943662, "grad_norm": 1.7724956274032593, "learning_rate": 3.54e-06, "loss": 0.2973, "step": 1183 }, { "epoch": 6.653521126760563, "grad_norm": 2.2687344551086426, "learning_rate": 3.543e-06, "loss": 0.355, "step": 1184 }, { "epoch": 6.659154929577465, "grad_norm": 2.4889185428619385, "learning_rate": 3.546e-06, "loss": 0.314, "step": 1185 }, { "epoch": 6.664788732394366, "grad_norm": 1.5463001728057861, "learning_rate": 3.5490000000000002e-06, "loss": 0.2498, "step": 1186 }, { "epoch": 6.670422535211268, "grad_norm": 10.992419242858887, "learning_rate": 3.552e-06, "loss": 0.3354, "step": 1187 }, { "epoch": 6.676056338028169, "grad_norm": 1.8986955881118774, "learning_rate": 3.555e-06, "loss": 0.2431, "step": 1188 }, { "epoch": 6.68169014084507, "grad_norm": 2.871622323989868, "learning_rate": 3.558e-06, "loss": 0.4735, "step": 1189 }, { "epoch": 6.687323943661972, "grad_norm": 2.1645452976226807, "learning_rate": 3.5610000000000003e-06, "loss": 0.4348, "step": 1190 }, { "epoch": 6.692957746478873, "grad_norm": 3.236179828643799, "learning_rate": 3.564e-06, "loss": 0.2616, "step": 1191 }, { "epoch": 6.698591549295775, "grad_norm": 3.038219451904297, "learning_rate": 3.5670000000000003e-06, "loss": 0.3257, "step": 1192 }, { "epoch": 6.704225352112676, "grad_norm": 4.6703362464904785, "learning_rate": 3.57e-06, "loss": 0.4107, "step": 1193 }, { "epoch": 6.709859154929577, "grad_norm": 
2.761137008666992, "learning_rate": 3.573e-06, "loss": 0.2989, "step": 1194 }, { "epoch": 6.715492957746479, "grad_norm": 2.4389891624450684, "learning_rate": 3.576e-06, "loss": 0.2375, "step": 1195 }, { "epoch": 6.72112676056338, "grad_norm": 2.9157509803771973, "learning_rate": 3.5790000000000004e-06, "loss": 0.2798, "step": 1196 }, { "epoch": 6.726760563380282, "grad_norm": 1.9337164163589478, "learning_rate": 3.582e-06, "loss": 0.2251, "step": 1197 }, { "epoch": 6.732394366197183, "grad_norm": 2.184020519256592, "learning_rate": 3.585e-06, "loss": 0.2249, "step": 1198 }, { "epoch": 6.738028169014084, "grad_norm": 8.07984447479248, "learning_rate": 3.588e-06, "loss": 0.369, "step": 1199 }, { "epoch": 6.743661971830986, "grad_norm": 2.855316638946533, "learning_rate": 3.591e-06, "loss": 0.297, "step": 1200 }, { "epoch": 6.749295774647887, "grad_norm": 4.808024883270264, "learning_rate": 3.5940000000000002e-06, "loss": 0.5577, "step": 1201 }, { "epoch": 6.754929577464789, "grad_norm": 5.586759090423584, "learning_rate": 3.5970000000000005e-06, "loss": 0.706, "step": 1202 }, { "epoch": 6.76056338028169, "grad_norm": 2.559178590774536, "learning_rate": 3.6e-06, "loss": 0.5853, "step": 1203 }, { "epoch": 6.766197183098591, "grad_norm": 2.225196123123169, "learning_rate": 3.603e-06, "loss": 0.5433, "step": 1204 }, { "epoch": 6.771830985915493, "grad_norm": 3.145524501800537, "learning_rate": 3.6060000000000003e-06, "loss": 0.525, "step": 1205 }, { "epoch": 6.777464788732394, "grad_norm": 2.0339698791503906, "learning_rate": 3.609e-06, "loss": 0.6089, "step": 1206 }, { "epoch": 6.783098591549296, "grad_norm": 2.2889506816864014, "learning_rate": 3.612e-06, "loss": 0.4391, "step": 1207 }, { "epoch": 6.788732394366197, "grad_norm": 4.053736686706543, "learning_rate": 3.615e-06, "loss": 0.5529, "step": 1208 }, { "epoch": 6.794366197183098, "grad_norm": 2.6446938514709473, "learning_rate": 3.618e-06, "loss": 0.4333, "step": 1209 }, { "epoch": 6.8, "grad_norm": 
1.5292097330093384, "learning_rate": 3.621e-06, "loss": 0.3836, "step": 1210 }, { "epoch": 6.805633802816901, "grad_norm": 1.9998457431793213, "learning_rate": 3.6240000000000004e-06, "loss": 0.4525, "step": 1211 }, { "epoch": 6.811267605633803, "grad_norm": 2.02827525138855, "learning_rate": 3.6269999999999997e-06, "loss": 0.38, "step": 1212 }, { "epoch": 6.816901408450704, "grad_norm": 1.8356679677963257, "learning_rate": 3.63e-06, "loss": 0.4216, "step": 1213 }, { "epoch": 6.822535211267605, "grad_norm": 1.4177058935165405, "learning_rate": 3.633e-06, "loss": 0.3505, "step": 1214 }, { "epoch": 6.828169014084507, "grad_norm": 3.1898791790008545, "learning_rate": 3.636e-06, "loss": 0.3956, "step": 1215 }, { "epoch": 6.833802816901408, "grad_norm": 1.4765366315841675, "learning_rate": 3.6390000000000002e-06, "loss": 0.31, "step": 1216 }, { "epoch": 6.83943661971831, "grad_norm": 1.524936318397522, "learning_rate": 3.642e-06, "loss": 0.3636, "step": 1217 }, { "epoch": 6.845070422535211, "grad_norm": 2.0679056644439697, "learning_rate": 3.645e-06, "loss": 0.3194, "step": 1218 }, { "epoch": 6.850704225352112, "grad_norm": 1.6321165561676025, "learning_rate": 3.648e-06, "loss": 0.3268, "step": 1219 }, { "epoch": 6.856338028169014, "grad_norm": 2.5327794551849365, "learning_rate": 3.6510000000000003e-06, "loss": 0.3631, "step": 1220 }, { "epoch": 6.861971830985915, "grad_norm": 3.757030963897705, "learning_rate": 3.654e-06, "loss": 0.3894, "step": 1221 }, { "epoch": 6.867605633802817, "grad_norm": 1.5975909233093262, "learning_rate": 3.657e-06, "loss": 0.3464, "step": 1222 }, { "epoch": 6.873239436619718, "grad_norm": 1.7750897407531738, "learning_rate": 3.66e-06, "loss": 0.3796, "step": 1223 }, { "epoch": 6.878873239436619, "grad_norm": 6.039484024047852, "learning_rate": 3.663e-06, "loss": 0.3128, "step": 1224 }, { "epoch": 6.884507042253521, "grad_norm": 2.327157735824585, "learning_rate": 3.666e-06, "loss": 0.2997, "step": 1225 }, { "epoch": 6.890140845070422, 
"grad_norm": 2.741419553756714, "learning_rate": 3.6690000000000004e-06, "loss": 0.3336, "step": 1226 }, { "epoch": 6.895774647887324, "grad_norm": 2.91344952583313, "learning_rate": 3.6719999999999997e-06, "loss": 0.3268, "step": 1227 }, { "epoch": 6.901408450704225, "grad_norm": 1.9330127239227295, "learning_rate": 3.675e-06, "loss": 0.2898, "step": 1228 }, { "epoch": 6.907042253521126, "grad_norm": 2.1876978874206543, "learning_rate": 3.678e-06, "loss": 0.29, "step": 1229 }, { "epoch": 6.912676056338028, "grad_norm": 1.7933498620986938, "learning_rate": 3.681e-06, "loss": 0.3507, "step": 1230 }, { "epoch": 6.918309859154929, "grad_norm": 2.1437816619873047, "learning_rate": 3.6840000000000002e-06, "loss": 0.2603, "step": 1231 }, { "epoch": 6.923943661971831, "grad_norm": 2.285102128982544, "learning_rate": 3.687e-06, "loss": 0.336, "step": 1232 }, { "epoch": 6.929577464788732, "grad_norm": 3.033290386199951, "learning_rate": 3.6900000000000002e-06, "loss": 0.2447, "step": 1233 }, { "epoch": 6.9352112676056334, "grad_norm": 1.8339595794677734, "learning_rate": 3.693e-06, "loss": 0.3326, "step": 1234 }, { "epoch": 6.940845070422535, "grad_norm": 6.130736351013184, "learning_rate": 3.6960000000000003e-06, "loss": 0.2775, "step": 1235 }, { "epoch": 6.946478873239436, "grad_norm": 1.6360076665878296, "learning_rate": 3.6990000000000005e-06, "loss": 0.3548, "step": 1236 }, { "epoch": 6.952112676056338, "grad_norm": 1.8222588300704956, "learning_rate": 3.702e-06, "loss": 0.2508, "step": 1237 }, { "epoch": 6.957746478873239, "grad_norm": 2.4487926959991455, "learning_rate": 3.705e-06, "loss": 0.2322, "step": 1238 }, { "epoch": 6.9633802816901404, "grad_norm": 1.6656486988067627, "learning_rate": 3.7080000000000003e-06, "loss": 0.3293, "step": 1239 }, { "epoch": 6.969014084507043, "grad_norm": 2.3842313289642334, "learning_rate": 3.711e-06, "loss": 0.254, "step": 1240 }, { "epoch": 6.974647887323943, "grad_norm": 1.6867510080337524, "learning_rate": 3.714e-06, "loss": 
0.22, "step": 1241 }, { "epoch": 6.9802816901408455, "grad_norm": 3.4533419609069824, "learning_rate": 3.717e-06, "loss": 0.3367, "step": 1242 }, { "epoch": 6.985915492957746, "grad_norm": 2.2456490993499756, "learning_rate": 3.72e-06, "loss": 0.2197, "step": 1243 }, { "epoch": 6.991549295774648, "grad_norm": 2.6007769107818604, "learning_rate": 3.723e-06, "loss": 0.3269, "step": 1244 }, { "epoch": 6.997183098591549, "grad_norm": 2.2577474117279053, "learning_rate": 3.7260000000000004e-06, "loss": 0.3661, "step": 1245 }, { "epoch": 7.0, "grad_norm": 1.489585041999817, "learning_rate": 3.7289999999999998e-06, "loss": 0.1072, "step": 1246 }, { "epoch": 7.005633802816901, "grad_norm": 2.942796468734741, "learning_rate": 3.732e-06, "loss": 0.597, "step": 1247 }, { "epoch": 7.011267605633803, "grad_norm": 1.9702796936035156, "learning_rate": 3.7350000000000002e-06, "loss": 0.5227, "step": 1248 }, { "epoch": 7.016901408450704, "grad_norm": 1.9511970281600952, "learning_rate": 3.738e-06, "loss": 0.5673, "step": 1249 }, { "epoch": 7.022535211267606, "grad_norm": 1.8123960494995117, "learning_rate": 3.7410000000000003e-06, "loss": 0.4931, "step": 1250 }, { "epoch": 7.028169014084507, "grad_norm": 2.1841039657592773, "learning_rate": 3.744e-06, "loss": 0.5076, "step": 1251 }, { "epoch": 7.033802816901408, "grad_norm": 2.381967306137085, "learning_rate": 3.747e-06, "loss": 0.4559, "step": 1252 }, { "epoch": 7.03943661971831, "grad_norm": 1.7379308938980103, "learning_rate": 3.75e-06, "loss": 0.4323, "step": 1253 }, { "epoch": 7.045070422535211, "grad_norm": 2.217899799346924, "learning_rate": 3.753e-06, "loss": 0.3766, "step": 1254 }, { "epoch": 7.050704225352113, "grad_norm": 1.6105690002441406, "learning_rate": 3.756e-06, "loss": 0.3546, "step": 1255 }, { "epoch": 7.056338028169014, "grad_norm": 1.9141321182250977, "learning_rate": 3.759e-06, "loss": 0.3721, "step": 1256 }, { "epoch": 7.061971830985915, "grad_norm": 3.5647926330566406, "learning_rate": 
3.7620000000000006e-06, "loss": 0.4276, "step": 1257 }, { "epoch": 7.067605633802817, "grad_norm": 2.1066043376922607, "learning_rate": 3.765e-06, "loss": 0.4077, "step": 1258 }, { "epoch": 7.073239436619718, "grad_norm": 2.4392478466033936, "learning_rate": 3.7679999999999998e-06, "loss": 0.4304, "step": 1259 }, { "epoch": 7.07887323943662, "grad_norm": 2.377776622772217, "learning_rate": 3.7710000000000004e-06, "loss": 0.422, "step": 1260 }, { "epoch": 7.084507042253521, "grad_norm": 2.3444559574127197, "learning_rate": 3.7739999999999998e-06, "loss": 0.3579, "step": 1261 }, { "epoch": 7.090140845070422, "grad_norm": 1.7686365842819214, "learning_rate": 3.7770000000000004e-06, "loss": 0.4232, "step": 1262 }, { "epoch": 7.095774647887324, "grad_norm": 3.6515040397644043, "learning_rate": 3.7800000000000002e-06, "loss": 0.3501, "step": 1263 }, { "epoch": 7.101408450704225, "grad_norm": 1.8560153245925903, "learning_rate": 3.7829999999999996e-06, "loss": 0.3537, "step": 1264 }, { "epoch": 7.107042253521127, "grad_norm": 1.4742803573608398, "learning_rate": 3.7860000000000003e-06, "loss": 0.3448, "step": 1265 }, { "epoch": 7.112676056338028, "grad_norm": 1.8644962310791016, "learning_rate": 3.789e-06, "loss": 0.3481, "step": 1266 }, { "epoch": 7.118309859154929, "grad_norm": 2.6356472969055176, "learning_rate": 3.7920000000000003e-06, "loss": 0.3168, "step": 1267 }, { "epoch": 7.123943661971831, "grad_norm": 2.093303918838501, "learning_rate": 3.795e-06, "loss": 0.3614, "step": 1268 }, { "epoch": 7.129577464788732, "grad_norm": 2.9173734188079834, "learning_rate": 3.798e-06, "loss": 0.3811, "step": 1269 }, { "epoch": 7.135211267605634, "grad_norm": 1.9854310750961304, "learning_rate": 3.801e-06, "loss": 0.2977, "step": 1270 }, { "epoch": 7.140845070422535, "grad_norm": 1.7500495910644531, "learning_rate": 3.804e-06, "loss": 0.3395, "step": 1271 }, { "epoch": 7.146478873239436, "grad_norm": 2.1014959812164307, "learning_rate": 3.8070000000000006e-06, "loss": 0.2915, 
"step": 1272 }, { "epoch": 7.152112676056338, "grad_norm": 2.16127347946167, "learning_rate": 3.81e-06, "loss": 0.2404, "step": 1273 }, { "epoch": 7.157746478873239, "grad_norm": 1.8703943490982056, "learning_rate": 3.8129999999999997e-06, "loss": 0.2151, "step": 1274 }, { "epoch": 7.163380281690141, "grad_norm": 1.5653964281082153, "learning_rate": 3.816e-06, "loss": 0.3025, "step": 1275 }, { "epoch": 7.169014084507042, "grad_norm": 1.7427077293395996, "learning_rate": 3.819e-06, "loss": 0.2936, "step": 1276 }, { "epoch": 7.174647887323943, "grad_norm": 1.5312286615371704, "learning_rate": 3.822000000000001e-06, "loss": 0.2488, "step": 1277 }, { "epoch": 7.180281690140845, "grad_norm": 2.5558838844299316, "learning_rate": 3.825e-06, "loss": 0.3139, "step": 1278 }, { "epoch": 7.185915492957746, "grad_norm": 1.9759135246276855, "learning_rate": 3.828e-06, "loss": 0.3107, "step": 1279 }, { "epoch": 7.191549295774648, "grad_norm": 5.1972455978393555, "learning_rate": 3.831e-06, "loss": 0.3202, "step": 1280 }, { "epoch": 7.197183098591549, "grad_norm": 1.7822787761688232, "learning_rate": 3.834e-06, "loss": 0.3294, "step": 1281 }, { "epoch": 7.20281690140845, "grad_norm": 2.3986661434173584, "learning_rate": 3.837000000000001e-06, "loss": 0.2741, "step": 1282 }, { "epoch": 7.208450704225352, "grad_norm": 1.836089015007019, "learning_rate": 3.8400000000000005e-06, "loss": 0.3044, "step": 1283 }, { "epoch": 7.214084507042253, "grad_norm": 4.180420398712158, "learning_rate": 3.8429999999999995e-06, "loss": 0.1905, "step": 1284 }, { "epoch": 7.219718309859155, "grad_norm": 2.484987497329712, "learning_rate": 3.846e-06, "loss": 0.2927, "step": 1285 }, { "epoch": 7.225352112676056, "grad_norm": 1.5884267091751099, "learning_rate": 3.849e-06, "loss": 0.2118, "step": 1286 }, { "epoch": 7.230985915492957, "grad_norm": 2.020714521408081, "learning_rate": 3.852e-06, "loss": 0.2936, "step": 1287 }, { "epoch": 7.236619718309859, "grad_norm": 1.4779607057571411, "learning_rate": 
3.855e-06, "loss": 0.2105, "step": 1288 }, { "epoch": 7.24225352112676, "grad_norm": 1.9601703882217407, "learning_rate": 3.858e-06, "loss": 0.1852, "step": 1289 }, { "epoch": 7.247887323943662, "grad_norm": 2.3440682888031006, "learning_rate": 3.861e-06, "loss": 0.3141, "step": 1290 }, { "epoch": 7.253521126760563, "grad_norm": 6.40558385848999, "learning_rate": 3.864e-06, "loss": 0.6681, "step": 1291 }, { "epoch": 7.259154929577464, "grad_norm": 5.862678527832031, "learning_rate": 3.8669999999999996e-06, "loss": 0.486, "step": 1292 }, { "epoch": 7.264788732394366, "grad_norm": 1.917628288269043, "learning_rate": 3.87e-06, "loss": 0.5683, "step": 1293 }, { "epoch": 7.270422535211267, "grad_norm": 2.065943717956543, "learning_rate": 3.873e-06, "loss": 0.5284, "step": 1294 }, { "epoch": 7.276056338028169, "grad_norm": 2.5022778511047363, "learning_rate": 3.876000000000001e-06, "loss": 0.5513, "step": 1295 }, { "epoch": 7.28169014084507, "grad_norm": 2.5667805671691895, "learning_rate": 3.8790000000000005e-06, "loss": 0.5083, "step": 1296 }, { "epoch": 7.2873239436619714, "grad_norm": 2.2210357189178467, "learning_rate": 3.8819999999999994e-06, "loss": 0.4684, "step": 1297 }, { "epoch": 7.292957746478873, "grad_norm": 2.0362372398376465, "learning_rate": 3.885e-06, "loss": 0.5014, "step": 1298 }, { "epoch": 7.298591549295774, "grad_norm": 2.5081002712249756, "learning_rate": 3.888e-06, "loss": 0.4344, "step": 1299 }, { "epoch": 7.304225352112676, "grad_norm": 2.7737104892730713, "learning_rate": 3.8910000000000005e-06, "loss": 0.4483, "step": 1300 }, { "epoch": 7.309859154929577, "grad_norm": 4.6342949867248535, "learning_rate": 3.894e-06, "loss": 0.4756, "step": 1301 }, { "epoch": 7.3154929577464785, "grad_norm": 4.416937351226807, "learning_rate": 3.897e-06, "loss": 0.3705, "step": 1302 }, { "epoch": 7.321126760563381, "grad_norm": 6.249976634979248, "learning_rate": 3.9e-06, "loss": 0.398, "step": 1303 }, { "epoch": 7.326760563380281, "grad_norm": 
1.5343388319015503, "learning_rate": 3.903e-06, "loss": 0.2925, "step": 1304 }, { "epoch": 7.3323943661971835, "grad_norm": 18.657236099243164, "learning_rate": 3.906e-06, "loss": 0.423, "step": 1305 }, { "epoch": 7.338028169014084, "grad_norm": 1.964646577835083, "learning_rate": 3.909e-06, "loss": 0.4297, "step": 1306 }, { "epoch": 7.343661971830986, "grad_norm": 4.358383655548096, "learning_rate": 3.912e-06, "loss": 0.2941, "step": 1307 }, { "epoch": 7.349295774647887, "grad_norm": 2.2277472019195557, "learning_rate": 3.915000000000001e-06, "loss": 0.3022, "step": 1308 }, { "epoch": 7.354929577464789, "grad_norm": 1.622240662574768, "learning_rate": 3.918e-06, "loss": 0.282, "step": 1309 }, { "epoch": 7.3605633802816905, "grad_norm": 2.0553150177001953, "learning_rate": 3.921e-06, "loss": 0.3984, "step": 1310 }, { "epoch": 7.366197183098592, "grad_norm": 1.7152191400527954, "learning_rate": 3.924e-06, "loss": 0.3483, "step": 1311 }, { "epoch": 7.371830985915493, "grad_norm": 1.5058910846710205, "learning_rate": 3.927e-06, "loss": 0.2638, "step": 1312 }, { "epoch": 7.377464788732395, "grad_norm": 1.9615142345428467, "learning_rate": 3.9300000000000005e-06, "loss": 0.3436, "step": 1313 }, { "epoch": 7.383098591549296, "grad_norm": 3.1061172485351562, "learning_rate": 3.933e-06, "loss": 0.3299, "step": 1314 }, { "epoch": 7.3887323943661976, "grad_norm": 1.650614857673645, "learning_rate": 3.936e-06, "loss": 0.2814, "step": 1315 }, { "epoch": 7.394366197183099, "grad_norm": 8.343231201171875, "learning_rate": 3.939e-06, "loss": 0.3009, "step": 1316 }, { "epoch": 7.4, "grad_norm": 2.07116436958313, "learning_rate": 3.942e-06, "loss": 0.3923, "step": 1317 }, { "epoch": 7.405633802816902, "grad_norm": 1.8848755359649658, "learning_rate": 3.945e-06, "loss": 0.246, "step": 1318 }, { "epoch": 7.411267605633803, "grad_norm": 1.7045667171478271, "learning_rate": 3.948e-06, "loss": 0.2561, "step": 1319 }, { "epoch": 7.416901408450705, "grad_norm": 1.672513723373413, 
"learning_rate": 3.951000000000001e-06, "loss": 0.3592, "step": 1320 }, { "epoch": 7.422535211267606, "grad_norm": 2.5083446502685547, "learning_rate": 3.954e-06, "loss": 0.2647, "step": 1321 }, { "epoch": 7.428169014084507, "grad_norm": 2.3258793354034424, "learning_rate": 3.9569999999999996e-06, "loss": 0.2645, "step": 1322 }, { "epoch": 7.433802816901409, "grad_norm": 1.745267629623413, "learning_rate": 3.96e-06, "loss": 0.2906, "step": 1323 }, { "epoch": 7.43943661971831, "grad_norm": 2.551854133605957, "learning_rate": 3.963e-06, "loss": 0.2761, "step": 1324 }, { "epoch": 7.445070422535212, "grad_norm": 2.0184693336486816, "learning_rate": 3.966000000000001e-06, "loss": 0.2406, "step": 1325 }, { "epoch": 7.450704225352113, "grad_norm": 2.9617841243743896, "learning_rate": 3.9690000000000005e-06, "loss": 0.2639, "step": 1326 }, { "epoch": 7.456338028169014, "grad_norm": 1.3669341802597046, "learning_rate": 3.971999999999999e-06, "loss": 0.2974, "step": 1327 }, { "epoch": 7.461971830985916, "grad_norm": 2.37941837310791, "learning_rate": 3.975e-06, "loss": 0.2117, "step": 1328 }, { "epoch": 7.467605633802817, "grad_norm": 1.8223644495010376, "learning_rate": 3.978e-06, "loss": 0.2317, "step": 1329 }, { "epoch": 7.473239436619719, "grad_norm": 2.0837550163269043, "learning_rate": 3.9810000000000005e-06, "loss": 0.3, "step": 1330 }, { "epoch": 7.47887323943662, "grad_norm": 1.8023602962493896, "learning_rate": 3.984e-06, "loss": 0.2139, "step": 1331 }, { "epoch": 7.484507042253521, "grad_norm": 2.3602218627929688, "learning_rate": 3.987e-06, "loss": 0.16, "step": 1332 }, { "epoch": 7.490140845070423, "grad_norm": 3.1677684783935547, "learning_rate": 3.99e-06, "loss": 0.2876, "step": 1333 }, { "epoch": 7.495774647887324, "grad_norm": 2.2833266258239746, "learning_rate": 3.993e-06, "loss": 0.2164, "step": 1334 }, { "epoch": 7.501408450704226, "grad_norm": 4.082460880279541, "learning_rate": 3.996e-06, "loss": 0.5892, "step": 1335 }, { "epoch": 7.507042253521127, 
"grad_norm": 2.5469284057617188, "learning_rate": 3.999e-06, "loss": 0.6576, "step": 1336 }, { "epoch": 7.512676056338028, "grad_norm": 2.4770989418029785, "learning_rate": 4.002e-06, "loss": 0.4967, "step": 1337 }, { "epoch": 7.51830985915493, "grad_norm": 2.6632752418518066, "learning_rate": 4.005000000000001e-06, "loss": 0.4906, "step": 1338 }, { "epoch": 7.523943661971831, "grad_norm": 3.051459789276123, "learning_rate": 4.008e-06, "loss": 0.547, "step": 1339 }, { "epoch": 7.529577464788733, "grad_norm": 2.0182905197143555, "learning_rate": 4.011e-06, "loss": 0.3925, "step": 1340 }, { "epoch": 7.535211267605634, "grad_norm": 1.9682265520095825, "learning_rate": 4.014e-06, "loss": 0.4279, "step": 1341 }, { "epoch": 7.540845070422535, "grad_norm": 2.786733388900757, "learning_rate": 4.017e-06, "loss": 0.4492, "step": 1342 }, { "epoch": 7.546478873239437, "grad_norm": 2.151893138885498, "learning_rate": 4.0200000000000005e-06, "loss": 0.4613, "step": 1343 }, { "epoch": 7.552112676056338, "grad_norm": 2.2952075004577637, "learning_rate": 4.023e-06, "loss": 0.2793, "step": 1344 }, { "epoch": 7.55774647887324, "grad_norm": 1.57631516456604, "learning_rate": 4.026000000000001e-06, "loss": 0.4238, "step": 1345 }, { "epoch": 7.563380281690141, "grad_norm": 1.1604045629501343, "learning_rate": 4.029e-06, "loss": 0.3005, "step": 1346 }, { "epoch": 7.569014084507042, "grad_norm": 1.8170359134674072, "learning_rate": 4.032e-06, "loss": 0.4152, "step": 1347 }, { "epoch": 7.574647887323944, "grad_norm": 3.171816110610962, "learning_rate": 4.035e-06, "loss": 0.5213, "step": 1348 }, { "epoch": 7.580281690140845, "grad_norm": 1.4295943975448608, "learning_rate": 4.038e-06, "loss": 0.3191, "step": 1349 }, { "epoch": 7.585915492957747, "grad_norm": 1.3342307806015015, "learning_rate": 4.041e-06, "loss": 0.2942, "step": 1350 }, { "epoch": 7.591549295774648, "grad_norm": 1.5344983339309692, "learning_rate": 4.044000000000001e-06, "loss": 0.3343, "step": 1351 }, { "epoch": 
7.597183098591549, "grad_norm": 2.176684856414795, "learning_rate": 4.0469999999999995e-06, "loss": 0.3469, "step": 1352 }, { "epoch": 7.602816901408451, "grad_norm": 1.3454927206039429, "learning_rate": 4.05e-06, "loss": 0.3074, "step": 1353 }, { "epoch": 7.608450704225352, "grad_norm": 1.4725590944290161, "learning_rate": 4.053e-06, "loss": 0.2926, "step": 1354 }, { "epoch": 7.614084507042254, "grad_norm": 2.4468507766723633, "learning_rate": 4.056e-06, "loss": 0.2698, "step": 1355 }, { "epoch": 7.619718309859155, "grad_norm": 1.4236220121383667, "learning_rate": 4.0590000000000004e-06, "loss": 0.2637, "step": 1356 }, { "epoch": 7.625352112676056, "grad_norm": 2.2398552894592285, "learning_rate": 4.062e-06, "loss": 0.3317, "step": 1357 }, { "epoch": 7.630985915492958, "grad_norm": 1.5240908861160278, "learning_rate": 4.065e-06, "loss": 0.342, "step": 1358 }, { "epoch": 7.636619718309859, "grad_norm": 1.9051419496536255, "learning_rate": 4.068e-06, "loss": 0.2736, "step": 1359 }, { "epoch": 7.642253521126761, "grad_norm": 1.6727283000946045, "learning_rate": 4.071e-06, "loss": 0.244, "step": 1360 }, { "epoch": 7.647887323943662, "grad_norm": 2.1765661239624023, "learning_rate": 4.074e-06, "loss": 0.3132, "step": 1361 }, { "epoch": 7.653521126760563, "grad_norm": 1.9414353370666504, "learning_rate": 4.077e-06, "loss": 0.2103, "step": 1362 }, { "epoch": 7.659154929577465, "grad_norm": 1.9741021394729614, "learning_rate": 4.080000000000001e-06, "loss": 0.2283, "step": 1363 }, { "epoch": 7.664788732394366, "grad_norm": 1.7333275079727173, "learning_rate": 4.083e-06, "loss": 0.2811, "step": 1364 }, { "epoch": 7.670422535211268, "grad_norm": 1.6467069387435913, "learning_rate": 4.0859999999999995e-06, "loss": 0.1742, "step": 1365 }, { "epoch": 7.676056338028169, "grad_norm": 1.3855736255645752, "learning_rate": 4.089e-06, "loss": 0.2564, "step": 1366 }, { "epoch": 7.68169014084507, "grad_norm": 9.799458503723145, "learning_rate": 4.092e-06, "loss": 0.3455, "step": 1367 
}, { "epoch": 7.687323943661972, "grad_norm": 1.322006106376648, "learning_rate": 4.095000000000001e-06, "loss": 0.212, "step": 1368 }, { "epoch": 7.692957746478873, "grad_norm": 1.6951122283935547, "learning_rate": 4.098e-06, "loss": 0.238, "step": 1369 }, { "epoch": 7.698591549295775, "grad_norm": 1.7336348295211792, "learning_rate": 4.100999999999999e-06, "loss": 0.2513, "step": 1370 }, { "epoch": 7.704225352112676, "grad_norm": 2.091733694076538, "learning_rate": 4.104e-06, "loss": 0.2341, "step": 1371 }, { "epoch": 7.709859154929577, "grad_norm": 2.2940683364868164, "learning_rate": 4.107e-06, "loss": 0.198, "step": 1372 }, { "epoch": 7.715492957746479, "grad_norm": 2.327643632888794, "learning_rate": 4.1100000000000005e-06, "loss": 0.3443, "step": 1373 }, { "epoch": 7.72112676056338, "grad_norm": 1.4039936065673828, "learning_rate": 4.113e-06, "loss": 0.2889, "step": 1374 }, { "epoch": 7.726760563380282, "grad_norm": 1.3708513975143433, "learning_rate": 4.116e-06, "loss": 0.2023, "step": 1375 }, { "epoch": 7.732394366197183, "grad_norm": 2.1089067459106445, "learning_rate": 4.119e-06, "loss": 0.2668, "step": 1376 }, { "epoch": 7.738028169014084, "grad_norm": 2.277501344680786, "learning_rate": 4.122e-06, "loss": 0.2216, "step": 1377 }, { "epoch": 7.743661971830986, "grad_norm": 1.9902244806289673, "learning_rate": 4.125e-06, "loss": 0.2451, "step": 1378 }, { "epoch": 7.749295774647887, "grad_norm": 8.09443187713623, "learning_rate": 4.128e-06, "loss": 0.5286, "step": 1379 }, { "epoch": 7.754929577464789, "grad_norm": 3.9587812423706055, "learning_rate": 4.131e-06, "loss": 0.4683, "step": 1380 }, { "epoch": 7.76056338028169, "grad_norm": 1.3808214664459229, "learning_rate": 4.1340000000000006e-06, "loss": 0.4592, "step": 1381 }, { "epoch": 7.766197183098591, "grad_norm": 1.801071047782898, "learning_rate": 4.137e-06, "loss": 0.4669, "step": 1382 }, { "epoch": 7.771830985915493, "grad_norm": 1.8322831392288208, "learning_rate": 4.14e-06, "loss": 0.4361, "step": 
1383 }, { "epoch": 7.777464788732394, "grad_norm": 2.574476480484009, "learning_rate": 4.143e-06, "loss": 0.4518, "step": 1384 }, { "epoch": 7.783098591549296, "grad_norm": 2.223013162612915, "learning_rate": 4.146e-06, "loss": 0.384, "step": 1385 }, { "epoch": 7.788732394366197, "grad_norm": 2.7924444675445557, "learning_rate": 4.1490000000000004e-06, "loss": 0.4443, "step": 1386 }, { "epoch": 7.794366197183098, "grad_norm": 2.544017791748047, "learning_rate": 4.152e-06, "loss": 0.424, "step": 1387 }, { "epoch": 7.8, "grad_norm": 2.595677375793457, "learning_rate": 4.155000000000001e-06, "loss": 0.4945, "step": 1388 }, { "epoch": 7.805633802816901, "grad_norm": 1.265525460243225, "learning_rate": 4.158e-06, "loss": 0.3223, "step": 1389 }, { "epoch": 7.811267605633803, "grad_norm": 1.8042664527893066, "learning_rate": 4.161e-06, "loss": 0.3606, "step": 1390 }, { "epoch": 7.816901408450704, "grad_norm": 1.4605374336242676, "learning_rate": 4.164e-06, "loss": 0.2847, "step": 1391 }, { "epoch": 7.822535211267605, "grad_norm": 1.7976760864257812, "learning_rate": 4.167e-06, "loss": 0.309, "step": 1392 }, { "epoch": 7.828169014084507, "grad_norm": 1.9072175025939941, "learning_rate": 4.170000000000001e-06, "loss": 0.3451, "step": 1393 }, { "epoch": 7.833802816901408, "grad_norm": 1.9373173713684082, "learning_rate": 4.1730000000000005e-06, "loss": 0.3706, "step": 1394 }, { "epoch": 7.83943661971831, "grad_norm": 1.8539760112762451, "learning_rate": 4.1759999999999995e-06, "loss": 0.2835, "step": 1395 }, { "epoch": 7.845070422535211, "grad_norm": 2.5366387367248535, "learning_rate": 4.179e-06, "loss": 0.4152, "step": 1396 }, { "epoch": 7.850704225352112, "grad_norm": 1.1432427167892456, "learning_rate": 4.182e-06, "loss": 0.2335, "step": 1397 }, { "epoch": 7.856338028169014, "grad_norm": 1.6592440605163574, "learning_rate": 4.185000000000001e-06, "loss": 0.2565, "step": 1398 }, { "epoch": 7.861971830985915, "grad_norm": 2.1884870529174805, "learning_rate": 4.188e-06, 
"loss": 0.3067, "step": 1399 }, { "epoch": 7.867605633802817, "grad_norm": 1.9141830205917358, "learning_rate": 4.191e-06, "loss": 0.3169, "step": 1400 }, { "epoch": 7.873239436619718, "grad_norm": 2.1069116592407227, "learning_rate": 4.194e-06, "loss": 0.279, "step": 1401 }, { "epoch": 7.878873239436619, "grad_norm": 1.2876276969909668, "learning_rate": 4.197e-06, "loss": 0.2509, "step": 1402 }, { "epoch": 7.884507042253521, "grad_norm": 1.3350346088409424, "learning_rate": 4.2000000000000004e-06, "loss": 0.2648, "step": 1403 }, { "epoch": 7.890140845070422, "grad_norm": 2.1058509349823, "learning_rate": 4.203e-06, "loss": 0.2377, "step": 1404 }, { "epoch": 7.895774647887324, "grad_norm": 1.1431804895401, "learning_rate": 4.206e-06, "loss": 0.233, "step": 1405 }, { "epoch": 7.901408450704225, "grad_norm": 1.5223488807678223, "learning_rate": 4.209000000000001e-06, "loss": 0.2711, "step": 1406 }, { "epoch": 7.907042253521126, "grad_norm": 1.707671046257019, "learning_rate": 4.212e-06, "loss": 0.2647, "step": 1407 }, { "epoch": 7.912676056338028, "grad_norm": 1.5224835872650146, "learning_rate": 4.215e-06, "loss": 0.2605, "step": 1408 }, { "epoch": 7.918309859154929, "grad_norm": 1.7407782077789307, "learning_rate": 4.218e-06, "loss": 0.2954, "step": 1409 }, { "epoch": 7.923943661971831, "grad_norm": 3.0225110054016113, "learning_rate": 4.221e-06, "loss": 0.2246, "step": 1410 }, { "epoch": 7.929577464788732, "grad_norm": 1.9126163721084595, "learning_rate": 4.2240000000000006e-06, "loss": 0.2617, "step": 1411 }, { "epoch": 7.9352112676056334, "grad_norm": 2.3817787170410156, "learning_rate": 4.227e-06, "loss": 0.2078, "step": 1412 }, { "epoch": 7.940845070422535, "grad_norm": 3.422006845474243, "learning_rate": 4.229999999999999e-06, "loss": 0.2221, "step": 1413 }, { "epoch": 7.946478873239436, "grad_norm": 3.3864829540252686, "learning_rate": 4.233e-06, "loss": 0.2606, "step": 1414 }, { "epoch": 7.952112676056338, "grad_norm": 1.2118957042694092, "learning_rate": 
4.236e-06, "loss": 0.1792, "step": 1415 }, { "epoch": 7.957746478873239, "grad_norm": 3.22188663482666, "learning_rate": 4.239e-06, "loss": 0.313, "step": 1416 }, { "epoch": 7.9633802816901404, "grad_norm": 1.8675514459609985, "learning_rate": 4.242e-06, "loss": 0.1902, "step": 1417 }, { "epoch": 7.969014084507043, "grad_norm": 8.949451446533203, "learning_rate": 4.245e-06, "loss": 0.1873, "step": 1418 }, { "epoch": 7.974647887323943, "grad_norm": 1.4021812677383423, "learning_rate": 4.248e-06, "loss": 0.1711, "step": 1419 }, { "epoch": 7.9802816901408455, "grad_norm": 4.161938667297363, "learning_rate": 4.251e-06, "loss": 0.3818, "step": 1420 }, { "epoch": 7.985915492957746, "grad_norm": 1.6883714199066162, "learning_rate": 4.254e-06, "loss": 0.2753, "step": 1421 }, { "epoch": 7.991549295774648, "grad_norm": 1.8008395433425903, "learning_rate": 4.257e-06, "loss": 0.2576, "step": 1422 }, { "epoch": 7.997183098591549, "grad_norm": 4.47776460647583, "learning_rate": 4.26e-06, "loss": 0.3115, "step": 1423 }, { "epoch": 8.0, "grad_norm": 0.8741196393966675, "learning_rate": 4.2630000000000005e-06, "loss": 0.0678, "step": 1424 }, { "epoch": 8.005633802816902, "grad_norm": 1.759154200553894, "learning_rate": 4.266e-06, "loss": 0.4719, "step": 1425 }, { "epoch": 8.011267605633803, "grad_norm": 1.4743149280548096, "learning_rate": 4.269e-06, "loss": 0.4722, "step": 1426 }, { "epoch": 8.016901408450705, "grad_norm": 1.1948416233062744, "learning_rate": 4.272e-06, "loss": 0.4362, "step": 1427 }, { "epoch": 8.022535211267606, "grad_norm": 1.6009913682937622, "learning_rate": 4.275e-06, "loss": 0.4798, "step": 1428 }, { "epoch": 8.028169014084508, "grad_norm": 1.272077202796936, "learning_rate": 4.278e-06, "loss": 0.3524, "step": 1429 }, { "epoch": 8.033802816901408, "grad_norm": 2.362617254257202, "learning_rate": 4.281e-06, "loss": 0.4642, "step": 1430 }, { "epoch": 8.03943661971831, "grad_norm": 1.950538992881775, "learning_rate": 4.284000000000001e-06, "loss": 0.4004, 
"step": 1431 }, { "epoch": 8.045070422535211, "grad_norm": 3.8150370121002197, "learning_rate": 4.287e-06, "loss": 0.3895, "step": 1432 }, { "epoch": 8.050704225352113, "grad_norm": 2.2891299724578857, "learning_rate": 4.29e-06, "loss": 0.3773, "step": 1433 }, { "epoch": 8.056338028169014, "grad_norm": 1.8192813396453857, "learning_rate": 4.293e-06, "loss": 0.3604, "step": 1434 }, { "epoch": 8.061971830985916, "grad_norm": 2.5638978481292725, "learning_rate": 4.296e-06, "loss": 0.4413, "step": 1435 }, { "epoch": 8.067605633802817, "grad_norm": 1.534132480621338, "learning_rate": 4.299000000000001e-06, "loss": 0.3052, "step": 1436 }, { "epoch": 8.073239436619719, "grad_norm": 6.902675628662109, "learning_rate": 4.3020000000000005e-06, "loss": 0.3758, "step": 1437 }, { "epoch": 8.07887323943662, "grad_norm": 4.676906585693359, "learning_rate": 4.3049999999999994e-06, "loss": 0.4069, "step": 1438 }, { "epoch": 8.084507042253522, "grad_norm": 2.597491979598999, "learning_rate": 4.308e-06, "loss": 0.3956, "step": 1439 }, { "epoch": 8.090140845070422, "grad_norm": 1.816064715385437, "learning_rate": 4.311e-06, "loss": 0.3575, "step": 1440 }, { "epoch": 8.095774647887325, "grad_norm": 1.692100167274475, "learning_rate": 4.3140000000000005e-06, "loss": 0.3229, "step": 1441 }, { "epoch": 8.101408450704225, "grad_norm": 3.6745800971984863, "learning_rate": 4.317e-06, "loss": 0.3284, "step": 1442 }, { "epoch": 8.107042253521128, "grad_norm": 1.7762925624847412, "learning_rate": 4.32e-06, "loss": 0.2321, "step": 1443 }, { "epoch": 8.112676056338028, "grad_norm": 2.830124855041504, "learning_rate": 4.323e-06, "loss": 0.324, "step": 1444 }, { "epoch": 8.11830985915493, "grad_norm": 2.0445613861083984, "learning_rate": 4.326e-06, "loss": 0.2481, "step": 1445 }, { "epoch": 8.12394366197183, "grad_norm": 1.7651817798614502, "learning_rate": 4.329e-06, "loss": 0.3089, "step": 1446 }, { "epoch": 8.129577464788733, "grad_norm": 8.616571426391602, "learning_rate": 4.332e-06, "loss": 
0.279, "step": 1447 }, { "epoch": 8.135211267605634, "grad_norm": 1.35053551197052, "learning_rate": 4.335e-06, "loss": 0.2878, "step": 1448 }, { "epoch": 8.140845070422536, "grad_norm": 1.2706048488616943, "learning_rate": 4.338000000000001e-06, "loss": 0.2505, "step": 1449 }, { "epoch": 8.146478873239436, "grad_norm": 3.066431760787964, "learning_rate": 4.341e-06, "loss": 0.2727, "step": 1450 }, { "epoch": 8.152112676056339, "grad_norm": 2.0426888465881348, "learning_rate": 4.344e-06, "loss": 0.2798, "step": 1451 }, { "epoch": 8.15774647887324, "grad_norm": 1.1568485498428345, "learning_rate": 4.347e-06, "loss": 0.222, "step": 1452 }, { "epoch": 8.163380281690142, "grad_norm": 1.2766001224517822, "learning_rate": 4.35e-06, "loss": 0.2244, "step": 1453 }, { "epoch": 8.169014084507042, "grad_norm": 2.2523610591888428, "learning_rate": 4.3530000000000005e-06, "loss": 0.2947, "step": 1454 }, { "epoch": 8.174647887323944, "grad_norm": 1.4231154918670654, "learning_rate": 4.356e-06, "loss": 0.1851, "step": 1455 }, { "epoch": 8.180281690140845, "grad_norm": 1.11939537525177, "learning_rate": 4.359e-06, "loss": 0.2087, "step": 1456 }, { "epoch": 8.185915492957747, "grad_norm": 1.716064691543579, "learning_rate": 4.362e-06, "loss": 0.2524, "step": 1457 }, { "epoch": 8.191549295774648, "grad_norm": 1.6529468297958374, "learning_rate": 4.365e-06, "loss": 0.2094, "step": 1458 }, { "epoch": 8.19718309859155, "grad_norm": 1.3632049560546875, "learning_rate": 4.368e-06, "loss": 0.2216, "step": 1459 }, { "epoch": 8.20281690140845, "grad_norm": 1.7184388637542725, "learning_rate": 4.371e-06, "loss": 0.2301, "step": 1460 }, { "epoch": 8.208450704225353, "grad_norm": 1.2325299978256226, "learning_rate": 4.374000000000001e-06, "loss": 0.2379, "step": 1461 }, { "epoch": 8.214084507042253, "grad_norm": 2.476687431335449, "learning_rate": 4.377e-06, "loss": 0.239, "step": 1462 }, { "epoch": 8.219718309859156, "grad_norm": 1.7622426748275757, "learning_rate": 4.3799999999999996e-06, 
"loss": 0.2606, "step": 1463 }, { "epoch": 8.225352112676056, "grad_norm": 8.9843168258667, "learning_rate": 4.383e-06, "loss": 0.412, "step": 1464 }, { "epoch": 8.230985915492958, "grad_norm": 1.143917202949524, "learning_rate": 4.386e-06, "loss": 0.164, "step": 1465 }, { "epoch": 8.236619718309859, "grad_norm": 1.5619351863861084, "learning_rate": 4.389000000000001e-06, "loss": 0.2957, "step": 1466 }, { "epoch": 8.242253521126761, "grad_norm": 6.872328758239746, "learning_rate": 4.3920000000000005e-06, "loss": 0.2306, "step": 1467 }, { "epoch": 8.247887323943662, "grad_norm": 1.9353114366531372, "learning_rate": 4.395e-06, "loss": 0.1851, "step": 1468 }, { "epoch": 8.253521126760564, "grad_norm": 3.791928768157959, "learning_rate": 4.398e-06, "loss": 0.5217, "step": 1469 }, { "epoch": 8.259154929577464, "grad_norm": 2.1830267906188965, "learning_rate": 4.401e-06, "loss": 0.4251, "step": 1470 }, { "epoch": 8.264788732394367, "grad_norm": 1.9686213731765747, "learning_rate": 4.4040000000000005e-06, "loss": 0.4957, "step": 1471 }, { "epoch": 8.270422535211267, "grad_norm": 1.631577730178833, "learning_rate": 4.407e-06, "loss": 0.472, "step": 1472 }, { "epoch": 8.27605633802817, "grad_norm": 1.8868086338043213, "learning_rate": 4.41e-06, "loss": 0.4835, "step": 1473 }, { "epoch": 8.28169014084507, "grad_norm": 3.9553818702697754, "learning_rate": 4.413000000000001e-06, "loss": 0.4191, "step": 1474 }, { "epoch": 8.287323943661972, "grad_norm": 1.8192929029464722, "learning_rate": 4.416e-06, "loss": 0.4281, "step": 1475 }, { "epoch": 8.292957746478873, "grad_norm": 1.603944182395935, "learning_rate": 4.4189999999999995e-06, "loss": 0.3851, "step": 1476 }, { "epoch": 8.298591549295775, "grad_norm": 1.8372137546539307, "learning_rate": 4.422e-06, "loss": 0.4719, "step": 1477 }, { "epoch": 8.304225352112676, "grad_norm": 1.615600824356079, "learning_rate": 4.425e-06, "loss": 0.3056, "step": 1478 }, { "epoch": 8.309859154929578, "grad_norm": 1.8355809450149536, 
"learning_rate": 4.428000000000001e-06, "loss": 0.3903, "step": 1479 }, { "epoch": 8.315492957746478, "grad_norm": 1.545371651649475, "learning_rate": 4.4310000000000004e-06, "loss": 0.3166, "step": 1480 }, { "epoch": 8.32112676056338, "grad_norm": 2.2291488647460938, "learning_rate": 4.433999999999999e-06, "loss": 0.2998, "step": 1481 }, { "epoch": 8.326760563380281, "grad_norm": 1.7675871849060059, "learning_rate": 4.437e-06, "loss": 0.369, "step": 1482 }, { "epoch": 8.332394366197184, "grad_norm": 1.6342406272888184, "learning_rate": 4.44e-06, "loss": 0.3008, "step": 1483 }, { "epoch": 8.338028169014084, "grad_norm": 1.5755921602249146, "learning_rate": 4.4430000000000005e-06, "loss": 0.2988, "step": 1484 }, { "epoch": 8.343661971830986, "grad_norm": 1.649460792541504, "learning_rate": 4.446e-06, "loss": 0.2371, "step": 1485 }, { "epoch": 8.349295774647887, "grad_norm": 1.5615849494934082, "learning_rate": 4.449e-06, "loss": 0.2648, "step": 1486 }, { "epoch": 8.35492957746479, "grad_norm": 1.6270219087600708, "learning_rate": 4.452e-06, "loss": 0.2899, "step": 1487 }, { "epoch": 8.36056338028169, "grad_norm": 1.7076084613800049, "learning_rate": 4.455e-06, "loss": 0.2863, "step": 1488 }, { "epoch": 8.366197183098592, "grad_norm": 4.066646575927734, "learning_rate": 4.458e-06, "loss": 0.2746, "step": 1489 }, { "epoch": 8.371830985915492, "grad_norm": 1.1448163986206055, "learning_rate": 4.461e-06, "loss": 0.2223, "step": 1490 }, { "epoch": 8.377464788732395, "grad_norm": 1.2670239210128784, "learning_rate": 4.464e-06, "loss": 0.3043, "step": 1491 }, { "epoch": 8.383098591549295, "grad_norm": 2.1269543170928955, "learning_rate": 4.467000000000001e-06, "loss": 0.2668, "step": 1492 }, { "epoch": 8.388732394366198, "grad_norm": 1.4538530111312866, "learning_rate": 4.4699999999999996e-06, "loss": 0.2809, "step": 1493 }, { "epoch": 8.394366197183098, "grad_norm": 1.4513168334960938, "learning_rate": 4.473e-06, "loss": 0.2391, "step": 1494 }, { "epoch": 8.4, 
"grad_norm": 1.5041109323501587, "learning_rate": 4.476e-06, "loss": 0.2806, "step": 1495 }, { "epoch": 8.4056338028169, "grad_norm": 1.5761175155639648, "learning_rate": 4.479e-06, "loss": 0.2162, "step": 1496 }, { "epoch": 8.411267605633803, "grad_norm": 2.805145263671875, "learning_rate": 4.4820000000000005e-06, "loss": 0.1815, "step": 1497 }, { "epoch": 8.416901408450704, "grad_norm": 1.6293209791183472, "learning_rate": 4.485e-06, "loss": 0.2175, "step": 1498 }, { "epoch": 8.422535211267606, "grad_norm": 1.6138747930526733, "learning_rate": 4.488e-06, "loss": 0.2333, "step": 1499 }, { "epoch": 8.428169014084506, "grad_norm": 2.731999397277832, "learning_rate": 4.491e-06, "loss": 0.2163, "step": 1500 }, { "epoch": 8.433802816901409, "grad_norm": 2.1869711875915527, "learning_rate": 4.494e-06, "loss": 0.2884, "step": 1501 }, { "epoch": 8.43943661971831, "grad_norm": 1.994489073753357, "learning_rate": 4.497e-06, "loss": 0.2571, "step": 1502 }, { "epoch": 8.445070422535212, "grad_norm": 1.321844220161438, "learning_rate": 4.5e-06, "loss": 0.2373, "step": 1503 }, { "epoch": 8.450704225352112, "grad_norm": 2.4946820735931396, "learning_rate": 4.503000000000001e-06, "loss": 0.2132, "step": 1504 }, { "epoch": 8.456338028169014, "grad_norm": 2.7976155281066895, "learning_rate": 4.506e-06, "loss": 0.172, "step": 1505 }, { "epoch": 8.461971830985915, "grad_norm": 1.472485899925232, "learning_rate": 4.5089999999999995e-06, "loss": 0.143, "step": 1506 }, { "epoch": 8.467605633802817, "grad_norm": 1.4903395175933838, "learning_rate": 4.512e-06, "loss": 0.2015, "step": 1507 }, { "epoch": 8.473239436619718, "grad_norm": 1.7212380170822144, "learning_rate": 4.515e-06, "loss": 0.234, "step": 1508 }, { "epoch": 8.47887323943662, "grad_norm": 1.3318415880203247, "learning_rate": 4.518000000000001e-06, "loss": 0.1578, "step": 1509 }, { "epoch": 8.48450704225352, "grad_norm": 1.2263563871383667, "learning_rate": 4.521e-06, "loss": 0.2274, "step": 1510 }, { "epoch": 
8.490140845070423, "grad_norm": 29.152389526367188, "learning_rate": 4.524e-06, "loss": 0.1964, "step": 1511 }, { "epoch": 8.495774647887323, "grad_norm": 2.28582501411438, "learning_rate": 4.527e-06, "loss": 0.2611, "step": 1512 }, { "epoch": 8.501408450704226, "grad_norm": 3.437446355819702, "learning_rate": 4.53e-06, "loss": 0.5758, "step": 1513 }, { "epoch": 8.507042253521126, "grad_norm": 3.00195050239563, "learning_rate": 4.5330000000000005e-06, "loss": 0.4946, "step": 1514 }, { "epoch": 8.512676056338028, "grad_norm": 8.957681655883789, "learning_rate": 4.536e-06, "loss": 0.5118, "step": 1515 }, { "epoch": 8.518309859154929, "grad_norm": 2.333034038543701, "learning_rate": 4.539e-06, "loss": 0.4657, "step": 1516 }, { "epoch": 8.523943661971831, "grad_norm": 2.256399154663086, "learning_rate": 4.542000000000001e-06, "loss": 0.4163, "step": 1517 }, { "epoch": 8.529577464788732, "grad_norm": 2.168394088745117, "learning_rate": 4.545e-06, "loss": 0.3973, "step": 1518 }, { "epoch": 8.535211267605634, "grad_norm": 2.315410852432251, "learning_rate": 4.548e-06, "loss": 0.3839, "step": 1519 }, { "epoch": 8.540845070422534, "grad_norm": 1.4619817733764648, "learning_rate": 4.551e-06, "loss": 0.3729, "step": 1520 }, { "epoch": 8.546478873239437, "grad_norm": 2.5322492122650146, "learning_rate": 4.554e-06, "loss": 0.3076, "step": 1521 }, { "epoch": 8.552112676056337, "grad_norm": 3.1597495079040527, "learning_rate": 4.557000000000001e-06, "loss": 0.4107, "step": 1522 }, { "epoch": 8.55774647887324, "grad_norm": 1.2187769412994385, "learning_rate": 4.56e-06, "loss": 0.339, "step": 1523 }, { "epoch": 8.56338028169014, "grad_norm": 1.206600308418274, "learning_rate": 4.563e-06, "loss": 0.3094, "step": 1524 }, { "epoch": 8.569014084507042, "grad_norm": 1.7799867391586304, "learning_rate": 4.566e-06, "loss": 0.3471, "step": 1525 }, { "epoch": 8.574647887323943, "grad_norm": 1.3167754411697388, "learning_rate": 4.569e-06, "loss": 0.2437, "step": 1526 }, { "epoch": 
8.580281690140845, "grad_norm": 1.7513374090194702, "learning_rate": 4.5720000000000004e-06, "loss": 0.3574, "step": 1527 }, { "epoch": 8.585915492957746, "grad_norm": 2.245265007019043, "learning_rate": 4.575e-06, "loss": 0.292, "step": 1528 }, { "epoch": 8.591549295774648, "grad_norm": 2.3302388191223145, "learning_rate": 4.578000000000001e-06, "loss": 0.2831, "step": 1529 }, { "epoch": 8.597183098591549, "grad_norm": 1.8705865144729614, "learning_rate": 4.581e-06, "loss": 0.3264, "step": 1530 }, { "epoch": 8.60281690140845, "grad_norm": 1.3408194780349731, "learning_rate": 4.584e-06, "loss": 0.2462, "step": 1531 }, { "epoch": 8.608450704225351, "grad_norm": 1.4617663621902466, "learning_rate": 4.587e-06, "loss": 0.2398, "step": 1532 }, { "epoch": 8.614084507042254, "grad_norm": 1.278315782546997, "learning_rate": 4.59e-06, "loss": 0.3061, "step": 1533 }, { "epoch": 8.619718309859154, "grad_norm": 1.5623974800109863, "learning_rate": 4.593000000000001e-06, "loss": 0.2534, "step": 1534 }, { "epoch": 8.625352112676056, "grad_norm": 1.6006313562393188, "learning_rate": 4.5960000000000006e-06, "loss": 0.2934, "step": 1535 }, { "epoch": 8.630985915492957, "grad_norm": 1.5115923881530762, "learning_rate": 4.5989999999999995e-06, "loss": 0.252, "step": 1536 }, { "epoch": 8.63661971830986, "grad_norm": 2.0316309928894043, "learning_rate": 4.602e-06, "loss": 0.2092, "step": 1537 }, { "epoch": 8.642253521126761, "grad_norm": 1.4315450191497803, "learning_rate": 4.605e-06, "loss": 0.2852, "step": 1538 }, { "epoch": 8.647887323943662, "grad_norm": 1.6427050828933716, "learning_rate": 4.608e-06, "loss": 0.2394, "step": 1539 }, { "epoch": 8.653521126760563, "grad_norm": 1.2119359970092773, "learning_rate": 4.611e-06, "loss": 0.1642, "step": 1540 }, { "epoch": 8.659154929577465, "grad_norm": 1.3998390436172485, "learning_rate": 4.614e-06, "loss": 0.2195, "step": 1541 }, { "epoch": 8.664788732394367, "grad_norm": 1.9655940532684326, "learning_rate": 4.617e-06, "loss": 0.235, 
"step": 1542 }, { "epoch": 8.670422535211268, "grad_norm": 2.12615966796875, "learning_rate": 4.62e-06, "loss": 0.2014, "step": 1543 }, { "epoch": 8.676056338028168, "grad_norm": 1.6178271770477295, "learning_rate": 4.623e-06, "loss": 0.2185, "step": 1544 }, { "epoch": 8.68169014084507, "grad_norm": 1.6038141250610352, "learning_rate": 4.626e-06, "loss": 0.2463, "step": 1545 }, { "epoch": 8.687323943661973, "grad_norm": 1.6338036060333252, "learning_rate": 4.629e-06, "loss": 0.2316, "step": 1546 }, { "epoch": 8.692957746478873, "grad_norm": 1.7557168006896973, "learning_rate": 4.632000000000001e-06, "loss": 0.2022, "step": 1547 }, { "epoch": 8.698591549295774, "grad_norm": 1.8935003280639648, "learning_rate": 4.635e-06, "loss": 0.2621, "step": 1548 }, { "epoch": 8.704225352112676, "grad_norm": 5.28822135925293, "learning_rate": 4.6379999999999995e-06, "loss": 0.2507, "step": 1549 }, { "epoch": 8.709859154929578, "grad_norm": 1.4678670167922974, "learning_rate": 4.641e-06, "loss": 0.1843, "step": 1550 }, { "epoch": 8.715492957746479, "grad_norm": 1.2331135272979736, "learning_rate": 4.644e-06, "loss": 0.1979, "step": 1551 }, { "epoch": 8.721126760563381, "grad_norm": 1.2113301753997803, "learning_rate": 4.6470000000000006e-06, "loss": 0.1228, "step": 1552 }, { "epoch": 8.726760563380282, "grad_norm": 1.5623661279678345, "learning_rate": 4.65e-06, "loss": 0.205, "step": 1553 }, { "epoch": 8.732394366197184, "grad_norm": 1.4790422916412354, "learning_rate": 4.653e-06, "loss": 0.1446, "step": 1554 }, { "epoch": 8.738028169014084, "grad_norm": 2.560182571411133, "learning_rate": 4.656e-06, "loss": 0.2346, "step": 1555 }, { "epoch": 8.743661971830987, "grad_norm": 1.4772406816482544, "learning_rate": 4.659e-06, "loss": 0.2076, "step": 1556 }, { "epoch": 8.749295774647887, "grad_norm": 1.5582391023635864, "learning_rate": 4.6620000000000004e-06, "loss": 0.4427, "step": 1557 }, { "epoch": 8.75492957746479, "grad_norm": 1.4840625524520874, "learning_rate": 4.665e-06, 
"loss": 0.5397, "step": 1558 }, { "epoch": 8.76056338028169, "grad_norm": 1.486564040184021, "learning_rate": 4.668e-06, "loss": 0.3812, "step": 1559 }, { "epoch": 8.766197183098592, "grad_norm": 1.336689829826355, "learning_rate": 4.671000000000001e-06, "loss": 0.4332, "step": 1560 }, { "epoch": 8.771830985915493, "grad_norm": 2.69026517868042, "learning_rate": 4.674e-06, "loss": 0.3702, "step": 1561 }, { "epoch": 8.777464788732395, "grad_norm": 1.4263160228729248, "learning_rate": 4.677e-06, "loss": 0.3837, "step": 1562 }, { "epoch": 8.783098591549296, "grad_norm": 1.2946802377700806, "learning_rate": 4.68e-06, "loss": 0.3174, "step": 1563 }, { "epoch": 8.788732394366198, "grad_norm": 1.1760841608047485, "learning_rate": 4.683e-06, "loss": 0.3123, "step": 1564 }, { "epoch": 8.794366197183098, "grad_norm": 1.6713536977767944, "learning_rate": 4.6860000000000005e-06, "loss": 0.3307, "step": 1565 }, { "epoch": 8.8, "grad_norm": 3.364861011505127, "learning_rate": 4.689e-06, "loss": 0.3143, "step": 1566 }, { "epoch": 8.805633802816901, "grad_norm": 4.595294952392578, "learning_rate": 4.692e-06, "loss": 0.3346, "step": 1567 }, { "epoch": 8.811267605633804, "grad_norm": 1.3517698049545288, "learning_rate": 4.695e-06, "loss": 0.3407, "step": 1568 }, { "epoch": 8.816901408450704, "grad_norm": 1.5998022556304932, "learning_rate": 4.698e-06, "loss": 0.3746, "step": 1569 }, { "epoch": 8.822535211267606, "grad_norm": 1.375231146812439, "learning_rate": 4.701e-06, "loss": 0.2922, "step": 1570 }, { "epoch": 8.828169014084507, "grad_norm": 1.1579004526138306, "learning_rate": 4.704e-06, "loss": 0.2483, "step": 1571 }, { "epoch": 8.83380281690141, "grad_norm": 1.1323860883712769, "learning_rate": 4.707000000000001e-06, "loss": 0.3158, "step": 1572 }, { "epoch": 8.83943661971831, "grad_norm": 1.894232988357544, "learning_rate": 4.71e-06, "loss": 0.3207, "step": 1573 }, { "epoch": 8.845070422535212, "grad_norm": 1.294670581817627, "learning_rate": 4.713e-06, "loss": 0.2095, 
"step": 1574 }, { "epoch": 8.850704225352112, "grad_norm": 1.1300592422485352, "learning_rate": 4.716e-06, "loss": 0.3171, "step": 1575 }, { "epoch": 8.856338028169015, "grad_norm": 1.9427255392074585, "learning_rate": 4.719e-06, "loss": 0.221, "step": 1576 }, { "epoch": 8.861971830985915, "grad_norm": 1.1601766347885132, "learning_rate": 4.722000000000001e-06, "loss": 0.242, "step": 1577 }, { "epoch": 8.867605633802818, "grad_norm": 1.333055019378662, "learning_rate": 4.7250000000000005e-06, "loss": 0.2893, "step": 1578 }, { "epoch": 8.873239436619718, "grad_norm": 1.6595031023025513, "learning_rate": 4.7279999999999995e-06, "loss": 0.295, "step": 1579 }, { "epoch": 8.87887323943662, "grad_norm": 1.5934200286865234, "learning_rate": 4.731e-06, "loss": 0.2654, "step": 1580 }, { "epoch": 8.88450704225352, "grad_norm": 1.705186128616333, "learning_rate": 4.734e-06, "loss": 0.2339, "step": 1581 }, { "epoch": 8.890140845070423, "grad_norm": 1.7265881299972534, "learning_rate": 4.7370000000000006e-06, "loss": 0.196, "step": 1582 }, { "epoch": 8.895774647887324, "grad_norm": 1.4162670373916626, "learning_rate": 4.74e-06, "loss": 0.1805, "step": 1583 }, { "epoch": 8.901408450704226, "grad_norm": 1.7539533376693726, "learning_rate": 4.743e-06, "loss": 0.213, "step": 1584 }, { "epoch": 8.907042253521126, "grad_norm": 1.8077377080917358, "learning_rate": 4.746e-06, "loss": 0.2184, "step": 1585 }, { "epoch": 8.912676056338029, "grad_norm": 3.8356640338897705, "learning_rate": 4.749e-06, "loss": 0.2878, "step": 1586 }, { "epoch": 8.91830985915493, "grad_norm": 1.0633769035339355, "learning_rate": 4.752e-06, "loss": 0.193, "step": 1587 }, { "epoch": 8.923943661971832, "grad_norm": 1.6502964496612549, "learning_rate": 4.755e-06, "loss": 0.215, "step": 1588 }, { "epoch": 8.929577464788732, "grad_norm": 2.322570562362671, "learning_rate": 4.758e-06, "loss": 0.2535, "step": 1589 }, { "epoch": 8.935211267605634, "grad_norm": 1.421547532081604, "learning_rate": 4.761000000000001e-06, 
"loss": 0.1805, "step": 1590 }, { "epoch": 8.940845070422535, "grad_norm": 1.952649712562561, "learning_rate": 4.764e-06, "loss": 0.1698, "step": 1591 }, { "epoch": 8.946478873239437, "grad_norm": 1.2020028829574585, "learning_rate": 4.767e-06, "loss": 0.2348, "step": 1592 }, { "epoch": 8.952112676056338, "grad_norm": 1.1883083581924438, "learning_rate": 4.77e-06, "loss": 0.1751, "step": 1593 }, { "epoch": 8.95774647887324, "grad_norm": 1.50223708152771, "learning_rate": 4.773e-06, "loss": 0.254, "step": 1594 }, { "epoch": 8.96338028169014, "grad_norm": 1.54977285861969, "learning_rate": 4.7760000000000005e-06, "loss": 0.2239, "step": 1595 }, { "epoch": 8.969014084507043, "grad_norm": 1.5210049152374268, "learning_rate": 4.779e-06, "loss": 0.1432, "step": 1596 }, { "epoch": 8.974647887323943, "grad_norm": 1.4679100513458252, "learning_rate": 4.782e-06, "loss": 0.1825, "step": 1597 }, { "epoch": 8.980281690140846, "grad_norm": 1.2444082498550415, "learning_rate": 4.785e-06, "loss": 0.1811, "step": 1598 }, { "epoch": 8.985915492957746, "grad_norm": 3.0096662044525146, "learning_rate": 4.788e-06, "loss": 0.1855, "step": 1599 }, { "epoch": 8.991549295774648, "grad_norm": 1.5335373878479004, "learning_rate": 4.791e-06, "loss": 0.1572, "step": 1600 }, { "epoch": 8.997183098591549, "grad_norm": 2.143509864807129, "learning_rate": 4.794e-06, "loss": 0.3803, "step": 1601 }, { "epoch": 9.0, "grad_norm": 2.404008150100708, "learning_rate": 4.797e-06, "loss": 0.0551, "step": 1602 }, { "epoch": 9.005633802816902, "grad_norm": 2.1751649379730225, "learning_rate": 4.800000000000001e-06, "loss": 0.4239, "step": 1603 }, { "epoch": 9.011267605633803, "grad_norm": 1.6735308170318604, "learning_rate": 4.803e-06, "loss": 0.4182, "step": 1604 }, { "epoch": 9.016901408450705, "grad_norm": 1.6804015636444092, "learning_rate": 4.806e-06, "loss": 0.5092, "step": 1605 }, { "epoch": 9.022535211267606, "grad_norm": 2.3872175216674805, "learning_rate": 4.809e-06, "loss": 0.3611, "step": 1606 }, 
{ "epoch": 9.028169014084508, "grad_norm": 1.3963783979415894, "learning_rate": 4.812e-06, "loss": 0.3796, "step": 1607 }, { "epoch": 9.033802816901408, "grad_norm": 8.51320743560791, "learning_rate": 4.8150000000000005e-06, "loss": 0.4125, "step": 1608 }, { "epoch": 9.03943661971831, "grad_norm": 1.8216145038604736, "learning_rate": 4.818e-06, "loss": 0.3891, "step": 1609 }, { "epoch": 9.045070422535211, "grad_norm": 1.4720280170440674, "learning_rate": 4.821e-06, "loss": 0.3147, "step": 1610 }, { "epoch": 9.050704225352113, "grad_norm": 1.3989057540893555, "learning_rate": 4.824e-06, "loss": 0.2614, "step": 1611 }, { "epoch": 9.056338028169014, "grad_norm": 1.2691177129745483, "learning_rate": 4.827e-06, "loss": 0.333, "step": 1612 }, { "epoch": 9.061971830985916, "grad_norm": 1.369696855545044, "learning_rate": 4.83e-06, "loss": 0.3234, "step": 1613 }, { "epoch": 9.067605633802817, "grad_norm": 1.1861156225204468, "learning_rate": 4.833e-06, "loss": 0.2896, "step": 1614 }, { "epoch": 9.073239436619719, "grad_norm": 1.3370124101638794, "learning_rate": 4.836000000000001e-06, "loss": 0.2718, "step": 1615 }, { "epoch": 9.07887323943662, "grad_norm": 3.0094714164733887, "learning_rate": 4.839e-06, "loss": 0.3982, "step": 1616 }, { "epoch": 9.084507042253522, "grad_norm": 1.3236639499664307, "learning_rate": 4.8419999999999996e-06, "loss": 0.3479, "step": 1617 }, { "epoch": 9.090140845070422, "grad_norm": 1.7041285037994385, "learning_rate": 4.845e-06, "loss": 0.2784, "step": 1618 }, { "epoch": 9.095774647887325, "grad_norm": 1.8562099933624268, "learning_rate": 4.848e-06, "loss": 0.2703, "step": 1619 }, { "epoch": 9.101408450704225, "grad_norm": 1.1025969982147217, "learning_rate": 4.851000000000001e-06, "loss": 0.2459, "step": 1620 }, { "epoch": 9.107042253521128, "grad_norm": 1.302261471748352, "learning_rate": 4.8540000000000005e-06, "loss": 0.244, "step": 1621 }, { "epoch": 9.112676056338028, "grad_norm": 1.4352957010269165, "learning_rate": 
4.856999999999999e-06, "loss": 0.264, "step": 1622 }, { "epoch": 9.11830985915493, "grad_norm": 1.2667394876480103, "learning_rate": 4.86e-06, "loss": 0.2033, "step": 1623 }, { "epoch": 9.12394366197183, "grad_norm": 2.2752907276153564, "learning_rate": 4.863e-06, "loss": 0.3531, "step": 1624 }, { "epoch": 9.129577464788733, "grad_norm": 1.5110918283462524, "learning_rate": 4.8660000000000005e-06, "loss": 0.3109, "step": 1625 }, { "epoch": 9.135211267605634, "grad_norm": 2.260697603225708, "learning_rate": 4.869e-06, "loss": 0.2065, "step": 1626 }, { "epoch": 9.140845070422536, "grad_norm": 1.4357144832611084, "learning_rate": 4.872e-06, "loss": 0.2142, "step": 1627 }, { "epoch": 9.146478873239436, "grad_norm": 1.1615310907363892, "learning_rate": 4.875e-06, "loss": 0.1753, "step": 1628 }, { "epoch": 9.152112676056339, "grad_norm": 1.850474238395691, "learning_rate": 4.878e-06, "loss": 0.2034, "step": 1629 }, { "epoch": 9.15774647887324, "grad_norm": 1.4347416162490845, "learning_rate": 4.881e-06, "loss": 0.1497, "step": 1630 }, { "epoch": 9.163380281690142, "grad_norm": 2.7112374305725098, "learning_rate": 4.884e-06, "loss": 0.2263, "step": 1631 }, { "epoch": 9.169014084507042, "grad_norm": 1.7354881763458252, "learning_rate": 4.887e-06, "loss": 0.2391, "step": 1632 }, { "epoch": 9.174647887323944, "grad_norm": 1.4367443323135376, "learning_rate": 4.890000000000001e-06, "loss": 0.1361, "step": 1633 }, { "epoch": 9.180281690140845, "grad_norm": 1.7989848852157593, "learning_rate": 4.8929999999999996e-06, "loss": 0.2463, "step": 1634 }, { "epoch": 9.185915492957747, "grad_norm": 1.8514043092727661, "learning_rate": 4.896e-06, "loss": 0.2333, "step": 1635 }, { "epoch": 9.191549295774648, "grad_norm": 1.2352797985076904, "learning_rate": 4.899e-06, "loss": 0.1282, "step": 1636 }, { "epoch": 9.19718309859155, "grad_norm": 1.4813181161880493, "learning_rate": 4.902e-06, "loss": 0.2365, "step": 1637 }, { "epoch": 9.20281690140845, "grad_norm": 1.1985927820205688, 
"learning_rate": 4.9050000000000005e-06, "loss": 0.2196, "step": 1638 }, { "epoch": 9.208450704225353, "grad_norm": 1.273464322090149, "learning_rate": 4.908e-06, "loss": 0.1344, "step": 1639 }, { "epoch": 9.214084507042253, "grad_norm": 1.2411627769470215, "learning_rate": 4.911e-06, "loss": 0.1682, "step": 1640 }, { "epoch": 9.219718309859156, "grad_norm": 1.061728596687317, "learning_rate": 4.914e-06, "loss": 0.2299, "step": 1641 }, { "epoch": 9.225352112676056, "grad_norm": 3.950594425201416, "learning_rate": 4.917e-06, "loss": 0.1894, "step": 1642 }, { "epoch": 9.230985915492958, "grad_norm": 1.703122854232788, "learning_rate": 4.92e-06, "loss": 0.1974, "step": 1643 }, { "epoch": 9.236619718309859, "grad_norm": 1.2241867780685425, "learning_rate": 4.923e-06, "loss": 0.1303, "step": 1644 }, { "epoch": 9.242253521126761, "grad_norm": 2.2721853256225586, "learning_rate": 4.926000000000001e-06, "loss": 0.1947, "step": 1645 }, { "epoch": 9.247887323943662, "grad_norm": 1.2577937841415405, "learning_rate": 4.929000000000001e-06, "loss": 0.1489, "step": 1646 }, { "epoch": 9.253521126760564, "grad_norm": 1.7401518821716309, "learning_rate": 4.9319999999999995e-06, "loss": 0.5074, "step": 1647 }, { "epoch": 9.259154929577464, "grad_norm": 1.653003454208374, "learning_rate": 4.935e-06, "loss": 0.4338, "step": 1648 }, { "epoch": 9.264788732394367, "grad_norm": 1.5166079998016357, "learning_rate": 4.938e-06, "loss": 0.3689, "step": 1649 }, { "epoch": 9.270422535211267, "grad_norm": 1.4627536535263062, "learning_rate": 4.941000000000001e-06, "loss": 0.446, "step": 1650 }, { "epoch": 9.27605633802817, "grad_norm": 1.4029494524002075, "learning_rate": 4.9440000000000004e-06, "loss": 0.4033, "step": 1651 }, { "epoch": 9.28169014084507, "grad_norm": 1.744861125946045, "learning_rate": 4.947e-06, "loss": 0.4164, "step": 1652 }, { "epoch": 9.287323943661972, "grad_norm": 1.5045570135116577, "learning_rate": 4.95e-06, "loss": 0.2612, "step": 1653 }, { "epoch": 9.292957746478873, 
"grad_norm": 2.112058162689209, "learning_rate": 4.953e-06, "loss": 0.3286, "step": 1654 }, { "epoch": 9.298591549295775, "grad_norm": 1.2139626741409302, "learning_rate": 4.9560000000000005e-06, "loss": 0.3579, "step": 1655 }, { "epoch": 9.304225352112676, "grad_norm": 1.121368646621704, "learning_rate": 4.959e-06, "loss": 0.2707, "step": 1656 }, { "epoch": 9.309859154929578, "grad_norm": 1.7354106903076172, "learning_rate": 4.962e-06, "loss": 0.363, "step": 1657 }, { "epoch": 9.315492957746478, "grad_norm": 7.843500137329102, "learning_rate": 4.965000000000001e-06, "loss": 0.2982, "step": 1658 }, { "epoch": 9.32112676056338, "grad_norm": 1.2963048219680786, "learning_rate": 4.968e-06, "loss": 0.3076, "step": 1659 }, { "epoch": 9.326760563380281, "grad_norm": 1.0923117399215698, "learning_rate": 4.9709999999999995e-06, "loss": 0.2433, "step": 1660 }, { "epoch": 9.332394366197184, "grad_norm": 1.3678795099258423, "learning_rate": 4.974e-06, "loss": 0.3413, "step": 1661 }, { "epoch": 9.338028169014084, "grad_norm": 1.681631326675415, "learning_rate": 4.977e-06, "loss": 0.3604, "step": 1662 }, { "epoch": 9.343661971830986, "grad_norm": 1.291095495223999, "learning_rate": 4.980000000000001e-06, "loss": 0.2068, "step": 1663 }, { "epoch": 9.349295774647887, "grad_norm": 1.0174518823623657, "learning_rate": 4.983e-06, "loss": 0.2354, "step": 1664 }, { "epoch": 9.35492957746479, "grad_norm": 1.1103700399398804, "learning_rate": 4.985999999999999e-06, "loss": 0.1935, "step": 1665 }, { "epoch": 9.36056338028169, "grad_norm": 1.5191657543182373, "learning_rate": 4.989e-06, "loss": 0.2148, "step": 1666 }, { "epoch": 9.366197183098592, "grad_norm": 2.6407430171966553, "learning_rate": 4.992e-06, "loss": 0.3127, "step": 1667 }, { "epoch": 9.371830985915492, "grad_norm": 1.3886287212371826, "learning_rate": 4.9950000000000005e-06, "loss": 0.2496, "step": 1668 }, { "epoch": 9.377464788732395, "grad_norm": 1.3290154933929443, "learning_rate": 4.998e-06, "loss": 0.2918, "step": 
1669 }, { "epoch": 9.383098591549295, "grad_norm": 1.5969303846359253, "learning_rate": 5.001e-06, "loss": 0.1669, "step": 1670 }, { "epoch": 9.388732394366198, "grad_norm": 2.0924878120422363, "learning_rate": 5.004e-06, "loss": 0.2498, "step": 1671 }, { "epoch": 9.394366197183098, "grad_norm": 0.89569491147995, "learning_rate": 5.007e-06, "loss": 0.1773, "step": 1672 }, { "epoch": 9.4, "grad_norm": 1.2081530094146729, "learning_rate": 5.01e-06, "loss": 0.2405, "step": 1673 }, { "epoch": 9.4056338028169, "grad_norm": 1.4741665124893188, "learning_rate": 5.013e-06, "loss": 0.1598, "step": 1674 }, { "epoch": 9.411267605633803, "grad_norm": 1.0000643730163574, "learning_rate": 5.016e-06, "loss": 0.235, "step": 1675 }, { "epoch": 9.416901408450704, "grad_norm": 1.1592191457748413, "learning_rate": 5.0190000000000006e-06, "loss": 0.2592, "step": 1676 }, { "epoch": 9.422535211267606, "grad_norm": 1.5941916704177856, "learning_rate": 5.0219999999999995e-06, "loss": 0.1768, "step": 1677 }, { "epoch": 9.428169014084506, "grad_norm": 2.502532720565796, "learning_rate": 5.025e-06, "loss": 0.1561, "step": 1678 }, { "epoch": 9.433802816901409, "grad_norm": 1.3516881465911865, "learning_rate": 5.028e-06, "loss": 0.1937, "step": 1679 }, { "epoch": 9.43943661971831, "grad_norm": 1.1611335277557373, "learning_rate": 5.031e-06, "loss": 0.1975, "step": 1680 }, { "epoch": 9.445070422535212, "grad_norm": 1.0306599140167236, "learning_rate": 5.034e-06, "loss": 0.1771, "step": 1681 }, { "epoch": 9.450704225352112, "grad_norm": 1.6772950887680054, "learning_rate": 5.037e-06, "loss": 0.1934, "step": 1682 }, { "epoch": 9.456338028169014, "grad_norm": 1.3397375345230103, "learning_rate": 5.04e-06, "loss": 0.1607, "step": 1683 }, { "epoch": 9.461971830985915, "grad_norm": 1.3354195356369019, "learning_rate": 5.043e-06, "loss": 0.1844, "step": 1684 }, { "epoch": 9.467605633802817, "grad_norm": 2.510239839553833, "learning_rate": 5.046e-06, "loss": 0.2194, "step": 1685 }, { "epoch": 
9.473239436619718, "grad_norm": 3.4921457767486572, "learning_rate": 5.049e-06, "loss": 0.2221, "step": 1686 }, { "epoch": 9.47887323943662, "grad_norm": 1.218308687210083, "learning_rate": 5.052e-06, "loss": 0.1278, "step": 1687 }, { "epoch": 9.48450704225352, "grad_norm": 1.8006235361099243, "learning_rate": 5.055000000000001e-06, "loss": 0.2187, "step": 1688 }, { "epoch": 9.490140845070423, "grad_norm": 1.820412278175354, "learning_rate": 5.0580000000000005e-06, "loss": 0.1286, "step": 1689 }, { "epoch": 9.495774647887323, "grad_norm": 2.1954402923583984, "learning_rate": 5.0609999999999995e-06, "loss": 0.1655, "step": 1690 }, { "epoch": 9.501408450704226, "grad_norm": 2.1704976558685303, "learning_rate": 5.064e-06, "loss": 0.4829, "step": 1691 }, { "epoch": 9.507042253521126, "grad_norm": 1.384144902229309, "learning_rate": 5.067e-06, "loss": 0.4359, "step": 1692 }, { "epoch": 9.512676056338028, "grad_norm": 1.424005389213562, "learning_rate": 5.070000000000001e-06, "loss": 0.4372, "step": 1693 }, { "epoch": 9.518309859154929, "grad_norm": 2.225982666015625, "learning_rate": 5.073e-06, "loss": 0.4492, "step": 1694 }, { "epoch": 9.523943661971831, "grad_norm": 1.540827751159668, "learning_rate": 5.076e-06, "loss": 0.3207, "step": 1695 }, { "epoch": 9.529577464788732, "grad_norm": 1.4873822927474976, "learning_rate": 5.079e-06, "loss": 0.3691, "step": 1696 }, { "epoch": 9.535211267605634, "grad_norm": 1.4053980112075806, "learning_rate": 5.082e-06, "loss": 0.3207, "step": 1697 }, { "epoch": 9.540845070422534, "grad_norm": 1.5264461040496826, "learning_rate": 5.0850000000000004e-06, "loss": 0.4351, "step": 1698 }, { "epoch": 9.546478873239437, "grad_norm": 1.1135752201080322, "learning_rate": 5.088e-06, "loss": 0.3394, "step": 1699 }, { "epoch": 9.552112676056337, "grad_norm": 1.4383032321929932, "learning_rate": 5.091e-06, "loss": 0.2782, "step": 1700 }, { "epoch": 9.55774647887324, "grad_norm": 1.6936475038528442, "learning_rate": 5.094000000000001e-06, "loss": 
0.3348, "step": 1701 }, { "epoch": 9.56338028169014, "grad_norm": 1.255472183227539, "learning_rate": 5.097e-06, "loss": 0.2828, "step": 1702 }, { "epoch": 9.569014084507042, "grad_norm": 1.3889168500900269, "learning_rate": 5.1e-06, "loss": 0.2969, "step": 1703 }, { "epoch": 9.574647887323943, "grad_norm": 1.4667749404907227, "learning_rate": 5.103e-06, "loss": 0.3454, "step": 1704 }, { "epoch": 9.580281690140845, "grad_norm": 2.171043872833252, "learning_rate": 5.106e-06, "loss": 0.2377, "step": 1705 }, { "epoch": 9.585915492957746, "grad_norm": 1.1556975841522217, "learning_rate": 5.1090000000000006e-06, "loss": 0.2295, "step": 1706 }, { "epoch": 9.591549295774648, "grad_norm": 1.045686960220337, "learning_rate": 5.112e-06, "loss": 0.1842, "step": 1707 }, { "epoch": 9.597183098591549, "grad_norm": 1.3039542436599731, "learning_rate": 5.115e-06, "loss": 0.3295, "step": 1708 }, { "epoch": 9.60281690140845, "grad_norm": 1.1722157001495361, "learning_rate": 5.118e-06, "loss": 0.1948, "step": 1709 }, { "epoch": 9.608450704225351, "grad_norm": 1.1104546785354614, "learning_rate": 5.121e-06, "loss": 0.2202, "step": 1710 }, { "epoch": 9.614084507042254, "grad_norm": 0.9361942410469055, "learning_rate": 5.124e-06, "loss": 0.2238, "step": 1711 }, { "epoch": 9.619718309859154, "grad_norm": 1.2315305471420288, "learning_rate": 5.127e-06, "loss": 0.2325, "step": 1712 }, { "epoch": 9.625352112676056, "grad_norm": 1.1951035261154175, "learning_rate": 5.130000000000001e-06, "loss": 0.2282, "step": 1713 }, { "epoch": 9.630985915492957, "grad_norm": 1.2106503248214722, "learning_rate": 5.133e-06, "loss": 0.2252, "step": 1714 }, { "epoch": 9.63661971830986, "grad_norm": 1.3121325969696045, "learning_rate": 5.136e-06, "loss": 0.217, "step": 1715 }, { "epoch": 9.642253521126761, "grad_norm": 1.4312717914581299, "learning_rate": 5.139e-06, "loss": 0.2502, "step": 1716 }, { "epoch": 9.647887323943662, "grad_norm": 1.684280276298523, "learning_rate": 5.142e-06, "loss": 0.2293, "step": 
1717 }, { "epoch": 9.653521126760563, "grad_norm": 1.4697318077087402, "learning_rate": 5.145000000000001e-06, "loss": 0.1822, "step": 1718 }, { "epoch": 9.659154929577465, "grad_norm": 1.4716107845306396, "learning_rate": 5.1480000000000005e-06, "loss": 0.1778, "step": 1719 }, { "epoch": 9.664788732394367, "grad_norm": 1.4920672178268433, "learning_rate": 5.1509999999999995e-06, "loss": 0.2094, "step": 1720 }, { "epoch": 9.670422535211268, "grad_norm": 1.1652508974075317, "learning_rate": 5.154e-06, "loss": 0.2064, "step": 1721 }, { "epoch": 9.676056338028168, "grad_norm": 2.0216257572174072, "learning_rate": 5.157e-06, "loss": 0.1893, "step": 1722 }, { "epoch": 9.68169014084507, "grad_norm": 1.9957540035247803, "learning_rate": 5.16e-06, "loss": 0.291, "step": 1723 }, { "epoch": 9.687323943661973, "grad_norm": 1.1012542247772217, "learning_rate": 5.163e-06, "loss": 0.272, "step": 1724 }, { "epoch": 9.692957746478873, "grad_norm": 1.2914425134658813, "learning_rate": 5.166e-06, "loss": 0.1989, "step": 1725 }, { "epoch": 9.698591549295774, "grad_norm": 1.3908804655075073, "learning_rate": 5.169e-06, "loss": 0.2181, "step": 1726 }, { "epoch": 9.704225352112676, "grad_norm": 1.855908989906311, "learning_rate": 5.172e-06, "loss": 0.2784, "step": 1727 }, { "epoch": 9.709859154929578, "grad_norm": 1.3928791284561157, "learning_rate": 5.175e-06, "loss": 0.2519, "step": 1728 }, { "epoch": 9.715492957746479, "grad_norm": 1.9230273962020874, "learning_rate": 5.178e-06, "loss": 0.145, "step": 1729 }, { "epoch": 9.721126760563381, "grad_norm": 1.883042812347412, "learning_rate": 5.181e-06, "loss": 0.2296, "step": 1730 }, { "epoch": 9.726760563380282, "grad_norm": 1.7075492143630981, "learning_rate": 5.184000000000001e-06, "loss": 0.1485, "step": 1731 }, { "epoch": 9.732394366197184, "grad_norm": 6.484768390655518, "learning_rate": 5.1870000000000005e-06, "loss": 0.2028, "step": 1732 }, { "epoch": 9.738028169014084, "grad_norm": 1.9935224056243896, "learning_rate": 
5.1899999999999994e-06, "loss": 0.215, "step": 1733 }, { "epoch": 9.743661971830987, "grad_norm": 1.9597669839859009, "learning_rate": 5.193e-06, "loss": 0.176, "step": 1734 }, { "epoch": 9.749295774647887, "grad_norm": 2.112300157546997, "learning_rate": 5.196e-06, "loss": 0.433, "step": 1735 }, { "epoch": 9.75492957746479, "grad_norm": 1.2114726305007935, "learning_rate": 5.1990000000000005e-06, "loss": 0.4177, "step": 1736 }, { "epoch": 9.76056338028169, "grad_norm": 1.549331784248352, "learning_rate": 5.202e-06, "loss": 0.3726, "step": 1737 }, { "epoch": 9.766197183098592, "grad_norm": 1.4799546003341675, "learning_rate": 5.205e-06, "loss": 0.3459, "step": 1738 }, { "epoch": 9.771830985915493, "grad_norm": 1.9181982278823853, "learning_rate": 5.208e-06, "loss": 0.3798, "step": 1739 }, { "epoch": 9.777464788732395, "grad_norm": 1.248927354812622, "learning_rate": 5.211e-06, "loss": 0.3264, "step": 1740 }, { "epoch": 9.783098591549296, "grad_norm": 1.184111475944519, "learning_rate": 5.214e-06, "loss": 0.2883, "step": 1741 }, { "epoch": 9.788732394366198, "grad_norm": 1.6016947031021118, "learning_rate": 5.217e-06, "loss": 0.3727, "step": 1742 }, { "epoch": 9.794366197183098, "grad_norm": 2.706996440887451, "learning_rate": 5.22e-06, "loss": 0.3576, "step": 1743 }, { "epoch": 9.8, "grad_norm": 1.058626413345337, "learning_rate": 5.223000000000001e-06, "loss": 0.3059, "step": 1744 }, { "epoch": 9.805633802816901, "grad_norm": 2.4785025119781494, "learning_rate": 5.226e-06, "loss": 0.3252, "step": 1745 }, { "epoch": 9.811267605633804, "grad_norm": 1.4219791889190674, "learning_rate": 5.229e-06, "loss": 0.2723, "step": 1746 }, { "epoch": 9.816901408450704, "grad_norm": 2.6879944801330566, "learning_rate": 5.232e-06, "loss": 0.3215, "step": 1747 }, { "epoch": 9.822535211267606, "grad_norm": 1.3310829401016235, "learning_rate": 5.235e-06, "loss": 0.2565, "step": 1748 }, { "epoch": 9.828169014084507, "grad_norm": 11.200881958007812, "learning_rate": 
5.2380000000000005e-06, "loss": 0.2459, "step": 1749 }, { "epoch": 9.83380281690141, "grad_norm": 1.3812123537063599, "learning_rate": 5.241e-06, "loss": 0.282, "step": 1750 }, { "epoch": 9.83943661971831, "grad_norm": 1.116904616355896, "learning_rate": 5.244e-06, "loss": 0.2586, "step": 1751 }, { "epoch": 9.845070422535212, "grad_norm": 1.1873703002929688, "learning_rate": 5.247e-06, "loss": 0.2641, "step": 1752 }, { "epoch": 9.850704225352112, "grad_norm": 1.6516838073730469, "learning_rate": 5.25e-06, "loss": 0.2448, "step": 1753 }, { "epoch": 9.856338028169015, "grad_norm": 1.0986006259918213, "learning_rate": 5.253e-06, "loss": 0.1955, "step": 1754 }, { "epoch": 9.861971830985915, "grad_norm": 1.0698142051696777, "learning_rate": 5.256e-06, "loss": 0.2326, "step": 1755 }, { "epoch": 9.867605633802818, "grad_norm": 2.1450283527374268, "learning_rate": 5.259000000000001e-06, "loss": 0.215, "step": 1756 }, { "epoch": 9.873239436619718, "grad_norm": 1.1111681461334229, "learning_rate": 5.262e-06, "loss": 0.2038, "step": 1757 }, { "epoch": 9.87887323943662, "grad_norm": 0.9330956339836121, "learning_rate": 5.2649999999999996e-06, "loss": 0.1889, "step": 1758 }, { "epoch": 9.88450704225352, "grad_norm": 1.5611501932144165, "learning_rate": 5.268e-06, "loss": 0.1849, "step": 1759 }, { "epoch": 9.890140845070423, "grad_norm": 1.0615895986557007, "learning_rate": 5.271e-06, "loss": 0.209, "step": 1760 }, { "epoch": 9.895774647887324, "grad_norm": 1.3564084768295288, "learning_rate": 5.274000000000001e-06, "loss": 0.2, "step": 1761 }, { "epoch": 9.901408450704226, "grad_norm": 1.6916522979736328, "learning_rate": 5.2770000000000005e-06, "loss": 0.2362, "step": 1762 }, { "epoch": 9.907042253521126, "grad_norm": 1.5228526592254639, "learning_rate": 5.279999999999999e-06, "loss": 0.1889, "step": 1763 }, { "epoch": 9.912676056338029, "grad_norm": 0.9726183414459229, "learning_rate": 5.283e-06, "loss": 0.1861, "step": 1764 }, { "epoch": 9.91830985915493, "grad_norm": 
1.0599619150161743, "learning_rate": 5.286e-06, "loss": 0.1698, "step": 1765 }, { "epoch": 9.923943661971832, "grad_norm": 47.07388687133789, "learning_rate": 5.2890000000000005e-06, "loss": 0.2392, "step": 1766 }, { "epoch": 9.929577464788732, "grad_norm": 1.3345016241073608, "learning_rate": 5.292e-06, "loss": 0.2087, "step": 1767 }, { "epoch": 9.935211267605634, "grad_norm": 1.6112972497940063, "learning_rate": 5.295e-06, "loss": 0.1356, "step": 1768 }, { "epoch": 9.940845070422535, "grad_norm": 0.9956724643707275, "learning_rate": 5.298e-06, "loss": 0.1781, "step": 1769 }, { "epoch": 9.946478873239437, "grad_norm": 1.5677871704101562, "learning_rate": 5.301e-06, "loss": 0.1776, "step": 1770 }, { "epoch": 9.952112676056338, "grad_norm": 1.4113849401474, "learning_rate": 5.304e-06, "loss": 0.1217, "step": 1771 }, { "epoch": 9.95774647887324, "grad_norm": 1.1521153450012207, "learning_rate": 5.307e-06, "loss": 0.1795, "step": 1772 }, { "epoch": 9.96338028169014, "grad_norm": 1.7775465250015259, "learning_rate": 5.31e-06, "loss": 0.1615, "step": 1773 }, { "epoch": 9.969014084507043, "grad_norm": 1.0774285793304443, "learning_rate": 5.313000000000001e-06, "loss": 0.1659, "step": 1774 }, { "epoch": 9.974647887323943, "grad_norm": 1.5444382429122925, "learning_rate": 5.3160000000000004e-06, "loss": 0.1339, "step": 1775 }, { "epoch": 9.980281690140846, "grad_norm": 1.1363102197647095, "learning_rate": 5.319e-06, "loss": 0.1432, "step": 1776 }, { "epoch": 9.985915492957746, "grad_norm": 1.413503646850586, "learning_rate": 5.322e-06, "loss": 0.1779, "step": 1777 }, { "epoch": 9.991549295774648, "grad_norm": 2.135429859161377, "learning_rate": 5.325e-06, "loss": 0.2813, "step": 1778 }, { "epoch": 9.997183098591549, "grad_norm": 1.5871760845184326, "learning_rate": 5.3280000000000005e-06, "loss": 0.3491, "step": 1779 }, { "epoch": 10.0, "grad_norm": 0.5673215985298157, "learning_rate": 5.331e-06, "loss": 0.0394, "step": 1780 }, { "epoch": 10.005633802816902, "grad_norm": 
2.712233304977417, "learning_rate": 5.334000000000001e-06, "loss": 0.4423, "step": 1781 }, { "epoch": 10.011267605633803, "grad_norm": 1.3109948635101318, "learning_rate": 5.337e-06, "loss": 0.3052, "step": 1782 }, { "epoch": 10.016901408450705, "grad_norm": 1.5464428663253784, "learning_rate": 5.34e-06, "loss": 0.3315, "step": 1783 }, { "epoch": 10.022535211267606, "grad_norm": 2.0342888832092285, "learning_rate": 5.343e-06, "loss": 0.325, "step": 1784 }, { "epoch": 10.028169014084508, "grad_norm": 1.4336090087890625, "learning_rate": 5.346e-06, "loss": 0.251, "step": 1785 }, { "epoch": 10.033802816901408, "grad_norm": 1.1521306037902832, "learning_rate": 5.349e-06, "loss": 0.3195, "step": 1786 }, { "epoch": 10.03943661971831, "grad_norm": 1.322704792022705, "learning_rate": 5.352000000000001e-06, "loss": 0.2817, "step": 1787 }, { "epoch": 10.045070422535211, "grad_norm": 1.7066935300827026, "learning_rate": 5.3549999999999996e-06, "loss": 0.3311, "step": 1788 }, { "epoch": 10.050704225352113, "grad_norm": 1.097094178199768, "learning_rate": 5.358e-06, "loss": 0.3077, "step": 1789 }, { "epoch": 10.056338028169014, "grad_norm": 1.1890008449554443, "learning_rate": 5.361e-06, "loss": 0.2903, "step": 1790 }, { "epoch": 10.061971830985916, "grad_norm": 1.534787893295288, "learning_rate": 5.364e-06, "loss": 0.3153, "step": 1791 }, { "epoch": 10.067605633802817, "grad_norm": 1.12851083278656, "learning_rate": 5.3670000000000005e-06, "loss": 0.2295, "step": 1792 }, { "epoch": 10.073239436619719, "grad_norm": 1.1443442106246948, "learning_rate": 5.37e-06, "loss": 0.2333, "step": 1793 }, { "epoch": 10.07887323943662, "grad_norm": 1.2331843376159668, "learning_rate": 5.373e-06, "loss": 0.1936, "step": 1794 }, { "epoch": 10.084507042253522, "grad_norm": 2.185972213745117, "learning_rate": 5.376e-06, "loss": 0.2381, "step": 1795 }, { "epoch": 10.090140845070422, "grad_norm": 1.1668970584869385, "learning_rate": 5.379e-06, "loss": 0.2934, "step": 1796 }, { "epoch": 
10.095774647887325, "grad_norm": 1.2957322597503662, "learning_rate": 5.382e-06, "loss": 0.222, "step": 1797 }, { "epoch": 10.101408450704225, "grad_norm": 1.0761736631393433, "learning_rate": 5.385e-06, "loss": 0.2377, "step": 1798 }, { "epoch": 10.107042253521128, "grad_norm": 1.205430030822754, "learning_rate": 5.388000000000001e-06, "loss": 0.1945, "step": 1799 }, { "epoch": 10.112676056338028, "grad_norm": 1.651803970336914, "learning_rate": 5.391e-06, "loss": 0.2466, "step": 1800 }, { "epoch": 10.11830985915493, "grad_norm": 1.5705289840698242, "learning_rate": 5.3939999999999995e-06, "loss": 0.2018, "step": 1801 }, { "epoch": 10.12394366197183, "grad_norm": 1.325864553451538, "learning_rate": 5.397e-06, "loss": 0.2786, "step": 1802 }, { "epoch": 10.129577464788733, "grad_norm": 1.1321028470993042, "learning_rate": 5.4e-06, "loss": 0.2239, "step": 1803 }, { "epoch": 10.135211267605634, "grad_norm": 1.231438159942627, "learning_rate": 5.403000000000001e-06, "loss": 0.1562, "step": 1804 }, { "epoch": 10.140845070422536, "grad_norm": 2.4172093868255615, "learning_rate": 5.406e-06, "loss": 0.2151, "step": 1805 }, { "epoch": 10.146478873239436, "grad_norm": 1.4648444652557373, "learning_rate": 5.408999999999999e-06, "loss": 0.2549, "step": 1806 }, { "epoch": 10.152112676056339, "grad_norm": 1.4741016626358032, "learning_rate": 5.412e-06, "loss": 0.1799, "step": 1807 }, { "epoch": 10.15774647887324, "grad_norm": 1.8245927095413208, "learning_rate": 5.415e-06, "loss": 0.1832, "step": 1808 }, { "epoch": 10.163380281690142, "grad_norm": 1.0400588512420654, "learning_rate": 5.4180000000000005e-06, "loss": 0.1708, "step": 1809 }, { "epoch": 10.169014084507042, "grad_norm": 0.9898479580879211, "learning_rate": 5.421e-06, "loss": 0.1632, "step": 1810 }, { "epoch": 10.174647887323944, "grad_norm": 1.2559927701950073, "learning_rate": 5.424e-06, "loss": 0.1235, "step": 1811 }, { "epoch": 10.180281690140845, "grad_norm": 1.174888253211975, "learning_rate": 5.427e-06, "loss": 
0.2159, "step": 1812 }, { "epoch": 10.185915492957747, "grad_norm": 1.3528563976287842, "learning_rate": 5.43e-06, "loss": 0.3019, "step": 1813 }, { "epoch": 10.191549295774648, "grad_norm": 1.9978166818618774, "learning_rate": 5.433e-06, "loss": 0.1962, "step": 1814 }, { "epoch": 10.19718309859155, "grad_norm": 1.639421820640564, "learning_rate": 5.436e-06, "loss": 0.1795, "step": 1815 }, { "epoch": 10.20281690140845, "grad_norm": 1.1614779233932495, "learning_rate": 5.439e-06, "loss": 0.1224, "step": 1816 }, { "epoch": 10.208450704225353, "grad_norm": 1.178054928779602, "learning_rate": 5.442000000000001e-06, "loss": 0.1546, "step": 1817 }, { "epoch": 10.214084507042253, "grad_norm": 1.4051032066345215, "learning_rate": 5.445e-06, "loss": 0.1671, "step": 1818 }, { "epoch": 10.219718309859156, "grad_norm": 3.6973588466644287, "learning_rate": 5.448e-06, "loss": 0.3107, "step": 1819 }, { "epoch": 10.225352112676056, "grad_norm": 1.0492578744888306, "learning_rate": 5.451e-06, "loss": 0.1599, "step": 1820 }, { "epoch": 10.230985915492958, "grad_norm": 1.3890033960342407, "learning_rate": 5.454e-06, "loss": 0.1414, "step": 1821 }, { "epoch": 10.236619718309859, "grad_norm": 1.4057667255401611, "learning_rate": 5.4570000000000004e-06, "loss": 0.1078, "step": 1822 }, { "epoch": 10.242253521126761, "grad_norm": 1.7399905920028687, "learning_rate": 5.46e-06, "loss": 0.1083, "step": 1823 }, { "epoch": 10.247887323943662, "grad_norm": 2.5505495071411133, "learning_rate": 5.463000000000001e-06, "loss": 0.1862, "step": 1824 }, { "epoch": 10.253521126760564, "grad_norm": 1.6197267770767212, "learning_rate": 5.466e-06, "loss": 0.3243, "step": 1825 }, { "epoch": 10.259154929577464, "grad_norm": 1.50508451461792, "learning_rate": 5.469e-06, "loss": 0.4629, "step": 1826 }, { "epoch": 10.264788732394367, "grad_norm": 1.3001490831375122, "learning_rate": 5.472e-06, "loss": 0.3645, "step": 1827 }, { "epoch": 10.270422535211267, "grad_norm": 1.1485068798065186, "learning_rate": 
5.475e-06, "loss": 0.3275, "step": 1828 }, { "epoch": 10.27605633802817, "grad_norm": 1.5922435522079468, "learning_rate": 5.478000000000001e-06, "loss": 0.3314, "step": 1829 }, { "epoch": 10.28169014084507, "grad_norm": 1.1420609951019287, "learning_rate": 5.4810000000000005e-06, "loss": 0.3578, "step": 1830 }, { "epoch": 10.287323943661972, "grad_norm": 1.2820889949798584, "learning_rate": 5.4839999999999995e-06, "loss": 0.3451, "step": 1831 }, { "epoch": 10.292957746478873, "grad_norm": 1.648556113243103, "learning_rate": 5.487e-06, "loss": 0.3187, "step": 1832 }, { "epoch": 10.298591549295775, "grad_norm": 1.0801588296890259, "learning_rate": 5.49e-06, "loss": 0.2918, "step": 1833 }, { "epoch": 10.304225352112676, "grad_norm": 2.102177381515503, "learning_rate": 5.493000000000001e-06, "loss": 0.3392, "step": 1834 }, { "epoch": 10.309859154929578, "grad_norm": 1.1850378513336182, "learning_rate": 5.496e-06, "loss": 0.3015, "step": 1835 }, { "epoch": 10.315492957746478, "grad_norm": 1.20204758644104, "learning_rate": 5.499e-06, "loss": 0.2774, "step": 1836 }, { "epoch": 10.32112676056338, "grad_norm": 1.180106520652771, "learning_rate": 5.502e-06, "loss": 0.233, "step": 1837 }, { "epoch": 10.326760563380281, "grad_norm": 2.5235595703125, "learning_rate": 5.505e-06, "loss": 0.4166, "step": 1838 }, { "epoch": 10.332394366197184, "grad_norm": 1.8129650354385376, "learning_rate": 5.5080000000000005e-06, "loss": 0.229, "step": 1839 }, { "epoch": 10.338028169014084, "grad_norm": 1.2815568447113037, "learning_rate": 5.511e-06, "loss": 0.2234, "step": 1840 }, { "epoch": 10.343661971830986, "grad_norm": 1.5012456178665161, "learning_rate": 5.514e-06, "loss": 0.3164, "step": 1841 }, { "epoch": 10.349295774647887, "grad_norm": 1.0983575582504272, "learning_rate": 5.517000000000001e-06, "loss": 0.2325, "step": 1842 }, { "epoch": 10.35492957746479, "grad_norm": 1.1174354553222656, "learning_rate": 5.52e-06, "loss": 0.2189, "step": 1843 }, { "epoch": 10.36056338028169, 
"grad_norm": 1.1141961812973022, "learning_rate": 5.523e-06, "loss": 0.2226, "step": 1844 }, { "epoch": 10.366197183098592, "grad_norm": 2.277977228164673, "learning_rate": 5.526e-06, "loss": 0.2246, "step": 1845 }, { "epoch": 10.371830985915492, "grad_norm": 1.3135465383529663, "learning_rate": 5.529e-06, "loss": 0.1719, "step": 1846 }, { "epoch": 10.377464788732395, "grad_norm": 1.2691211700439453, "learning_rate": 5.5320000000000006e-06, "loss": 0.2077, "step": 1847 }, { "epoch": 10.383098591549295, "grad_norm": 1.4208314418792725, "learning_rate": 5.535e-06, "loss": 0.2899, "step": 1848 }, { "epoch": 10.388732394366198, "grad_norm": 6.182130813598633, "learning_rate": 5.537999999999999e-06, "loss": 0.189, "step": 1849 }, { "epoch": 10.394366197183098, "grad_norm": 0.9518446922302246, "learning_rate": 5.541e-06, "loss": 0.1451, "step": 1850 }, { "epoch": 10.4, "grad_norm": 1.36690354347229, "learning_rate": 5.544e-06, "loss": 0.2481, "step": 1851 }, { "epoch": 10.4056338028169, "grad_norm": 1.1673601865768433, "learning_rate": 5.547e-06, "loss": 0.2044, "step": 1852 }, { "epoch": 10.411267605633803, "grad_norm": 0.7720100283622742, "learning_rate": 5.55e-06, "loss": 0.136, "step": 1853 }, { "epoch": 10.416901408450704, "grad_norm": 1.0986549854278564, "learning_rate": 5.553e-06, "loss": 0.1564, "step": 1854 }, { "epoch": 10.422535211267606, "grad_norm": 1.3261797428131104, "learning_rate": 5.556e-06, "loss": 0.1798, "step": 1855 }, { "epoch": 10.428169014084506, "grad_norm": 0.9458950161933899, "learning_rate": 5.559e-06, "loss": 0.1505, "step": 1856 }, { "epoch": 10.433802816901409, "grad_norm": 1.2200461626052856, "learning_rate": 5.562e-06, "loss": 0.1302, "step": 1857 }, { "epoch": 10.43943661971831, "grad_norm": 1.4490344524383545, "learning_rate": 5.565e-06, "loss": 0.2029, "step": 1858 }, { "epoch": 10.445070422535212, "grad_norm": 1.106421947479248, "learning_rate": 5.568e-06, "loss": 0.1913, "step": 1859 }, { "epoch": 10.450704225352112, "grad_norm": 
1.724700927734375, "learning_rate": 5.5710000000000005e-06, "loss": 0.2284, "step": 1860 }, { "epoch": 10.456338028169014, "grad_norm": 1.1539700031280518, "learning_rate": 5.574e-06, "loss": 0.1458, "step": 1861 }, { "epoch": 10.461971830985915, "grad_norm": 1.846913456916809, "learning_rate": 5.577e-06, "loss": 0.1891, "step": 1862 }, { "epoch": 10.467605633802817, "grad_norm": 1.6262789964675903, "learning_rate": 5.58e-06, "loss": 0.1118, "step": 1863 }, { "epoch": 10.473239436619718, "grad_norm": 1.6254218816757202, "learning_rate": 5.583e-06, "loss": 0.2152, "step": 1864 }, { "epoch": 10.47887323943662, "grad_norm": 1.2913464307785034, "learning_rate": 5.586e-06, "loss": 0.1445, "step": 1865 }, { "epoch": 10.48450704225352, "grad_norm": 1.5083683729171753, "learning_rate": 5.589e-06, "loss": 0.2232, "step": 1866 }, { "epoch": 10.490140845070423, "grad_norm": 3.0667624473571777, "learning_rate": 5.592000000000001e-06, "loss": 0.1734, "step": 1867 }, { "epoch": 10.495774647887323, "grad_norm": 1.3483014106750488, "learning_rate": 5.595e-06, "loss": 0.1249, "step": 1868 }, { "epoch": 10.501408450704226, "grad_norm": 2.2876298427581787, "learning_rate": 5.598e-06, "loss": 0.4289, "step": 1869 }, { "epoch": 10.507042253521126, "grad_norm": 1.1808561086654663, "learning_rate": 5.601e-06, "loss": 0.3492, "step": 1870 }, { "epoch": 10.512676056338028, "grad_norm": 1.1197243928909302, "learning_rate": 5.604e-06, "loss": 0.3903, "step": 1871 }, { "epoch": 10.518309859154929, "grad_norm": 1.048506736755371, "learning_rate": 5.607000000000001e-06, "loss": 0.3046, "step": 1872 }, { "epoch": 10.523943661971831, "grad_norm": 1.0717236995697021, "learning_rate": 5.6100000000000005e-06, "loss": 0.3549, "step": 1873 }, { "epoch": 10.529577464788732, "grad_norm": 1.456915020942688, "learning_rate": 5.6129999999999995e-06, "loss": 0.3336, "step": 1874 }, { "epoch": 10.535211267605634, "grad_norm": 1.2871387004852295, "learning_rate": 5.616e-06, "loss": 0.3496, "step": 1875 }, { 
"epoch": 10.540845070422534, "grad_norm": 1.0277163982391357, "learning_rate": 5.619e-06, "loss": 0.2342, "step": 1876 }, { "epoch": 10.546478873239437, "grad_norm": 1.4404959678649902, "learning_rate": 5.6220000000000006e-06, "loss": 0.2896, "step": 1877 }, { "epoch": 10.552112676056337, "grad_norm": 1.2584831714630127, "learning_rate": 5.625e-06, "loss": 0.2591, "step": 1878 }, { "epoch": 10.55774647887324, "grad_norm": 1.0759950876235962, "learning_rate": 5.628e-06, "loss": 0.2986, "step": 1879 }, { "epoch": 10.56338028169014, "grad_norm": 1.306809425354004, "learning_rate": 5.631e-06, "loss": 0.2829, "step": 1880 }, { "epoch": 10.569014084507042, "grad_norm": 1.6921708583831787, "learning_rate": 5.634e-06, "loss": 0.3363, "step": 1881 }, { "epoch": 10.574647887323943, "grad_norm": 1.4461113214492798, "learning_rate": 5.637e-06, "loss": 0.2491, "step": 1882 }, { "epoch": 10.580281690140845, "grad_norm": 1.2552894353866577, "learning_rate": 5.64e-06, "loss": 0.344, "step": 1883 }, { "epoch": 10.585915492957746, "grad_norm": 1.1658101081848145, "learning_rate": 5.643e-06, "loss": 0.2323, "step": 1884 }, { "epoch": 10.591549295774648, "grad_norm": 0.9359063506126404, "learning_rate": 5.646000000000001e-06, "loss": 0.1858, "step": 1885 }, { "epoch": 10.597183098591549, "grad_norm": 1.1769253015518188, "learning_rate": 5.649e-06, "loss": 0.255, "step": 1886 }, { "epoch": 10.60281690140845, "grad_norm": 0.8949055671691895, "learning_rate": 5.652e-06, "loss": 0.1917, "step": 1887 }, { "epoch": 10.608450704225351, "grad_norm": 0.9992433190345764, "learning_rate": 5.655e-06, "loss": 0.1564, "step": 1888 }, { "epoch": 10.614084507042254, "grad_norm": 1.2740421295166016, "learning_rate": 5.658e-06, "loss": 0.2582, "step": 1889 }, { "epoch": 10.619718309859154, "grad_norm": 1.136183261871338, "learning_rate": 5.6610000000000005e-06, "loss": 0.2245, "step": 1890 }, { "epoch": 10.625352112676056, "grad_norm": 1.2586482763290405, "learning_rate": 5.664e-06, "loss": 0.2837, 
"step": 1891 }, { "epoch": 10.630985915492957, "grad_norm": 0.9256576299667358, "learning_rate": 5.667e-06, "loss": 0.1734, "step": 1892 }, { "epoch": 10.63661971830986, "grad_norm": 1.0539048910140991, "learning_rate": 5.67e-06, "loss": 0.1566, "step": 1893 }, { "epoch": 10.642253521126761, "grad_norm": 0.9111283421516418, "learning_rate": 5.673e-06, "loss": 0.1409, "step": 1894 }, { "epoch": 10.647887323943662, "grad_norm": 1.2662277221679688, "learning_rate": 5.676e-06, "loss": 0.1594, "step": 1895 }, { "epoch": 10.653521126760563, "grad_norm": 0.9073997735977173, "learning_rate": 5.679e-06, "loss": 0.1276, "step": 1896 }, { "epoch": 10.659154929577465, "grad_norm": 1.0275629758834839, "learning_rate": 5.682000000000001e-06, "loss": 0.1516, "step": 1897 }, { "epoch": 10.664788732394367, "grad_norm": 1.2246308326721191, "learning_rate": 5.685e-06, "loss": 0.1623, "step": 1898 }, { "epoch": 10.670422535211268, "grad_norm": 1.6580581665039062, "learning_rate": 5.688e-06, "loss": 0.1685, "step": 1899 }, { "epoch": 10.676056338028168, "grad_norm": 1.6456947326660156, "learning_rate": 5.691e-06, "loss": 0.2681, "step": 1900 }, { "epoch": 10.68169014084507, "grad_norm": 1.289567232131958, "learning_rate": 5.694e-06, "loss": 0.1741, "step": 1901 }, { "epoch": 10.687323943661973, "grad_norm": 1.335058569908142, "learning_rate": 5.697000000000001e-06, "loss": 0.173, "step": 1902 }, { "epoch": 10.692957746478873, "grad_norm": 1.0709600448608398, "learning_rate": 5.7000000000000005e-06, "loss": 0.1369, "step": 1903 }, { "epoch": 10.698591549295774, "grad_norm": 1.269669771194458, "learning_rate": 5.703e-06, "loss": 0.1906, "step": 1904 }, { "epoch": 10.704225352112676, "grad_norm": 1.0549677610397339, "learning_rate": 5.706e-06, "loss": 0.1561, "step": 1905 }, { "epoch": 10.709859154929578, "grad_norm": 2.047976493835449, "learning_rate": 5.709e-06, "loss": 0.1405, "step": 1906 }, { "epoch": 10.715492957746479, "grad_norm": 1.6894605159759521, "learning_rate": 
5.7120000000000005e-06, "loss": 0.1962, "step": 1907 }, { "epoch": 10.721126760563381, "grad_norm": 1.018565058708191, "learning_rate": 5.715e-06, "loss": 0.164, "step": 1908 }, { "epoch": 10.726760563380282, "grad_norm": 4.234405517578125, "learning_rate": 5.718e-06, "loss": 0.1485, "step": 1909 }, { "epoch": 10.732394366197184, "grad_norm": 1.052425503730774, "learning_rate": 5.721000000000001e-06, "loss": 0.1416, "step": 1910 }, { "epoch": 10.738028169014084, "grad_norm": 1.763474702835083, "learning_rate": 5.724e-06, "loss": 0.1719, "step": 1911 }, { "epoch": 10.743661971830987, "grad_norm": 1.315769076347351, "learning_rate": 5.7269999999999995e-06, "loss": 0.1119, "step": 1912 }, { "epoch": 10.749295774647887, "grad_norm": 2.9767961502075195, "learning_rate": 5.73e-06, "loss": 0.514, "step": 1913 }, { "epoch": 10.75492957746479, "grad_norm": 2.0633254051208496, "learning_rate": 5.733e-06, "loss": 0.4159, "step": 1914 }, { "epoch": 10.76056338028169, "grad_norm": 1.5858473777770996, "learning_rate": 5.736000000000001e-06, "loss": 0.4213, "step": 1915 }, { "epoch": 10.766197183098592, "grad_norm": 1.5844579935073853, "learning_rate": 5.7390000000000004e-06, "loss": 0.4654, "step": 1916 }, { "epoch": 10.771830985915493, "grad_norm": 1.7236393690109253, "learning_rate": 5.741999999999999e-06, "loss": 0.402, "step": 1917 }, { "epoch": 10.777464788732395, "grad_norm": 2.2887051105499268, "learning_rate": 5.745e-06, "loss": 0.3667, "step": 1918 }, { "epoch": 10.783098591549296, "grad_norm": 1.9380217790603638, "learning_rate": 5.748e-06, "loss": 0.2985, "step": 1919 }, { "epoch": 10.788732394366198, "grad_norm": 2.2385096549987793, "learning_rate": 5.7510000000000005e-06, "loss": 0.3152, "step": 1920 }, { "epoch": 10.794366197183098, "grad_norm": 0.9649767875671387, "learning_rate": 5.754e-06, "loss": 0.2795, "step": 1921 }, { "epoch": 10.8, "grad_norm": 1.9379098415374756, "learning_rate": 5.757e-06, "loss": 0.2738, "step": 1922 }, { "epoch": 10.805633802816901, 
"grad_norm": 1.2937359809875488, "learning_rate": 5.76e-06, "loss": 0.2045, "step": 1923 }, { "epoch": 10.811267605633804, "grad_norm": 1.6028040647506714, "learning_rate": 5.763e-06, "loss": 0.2753, "step": 1924 }, { "epoch": 10.816901408450704, "grad_norm": 1.5750566720962524, "learning_rate": 5.766e-06, "loss": 0.3305, "step": 1925 }, { "epoch": 10.822535211267606, "grad_norm": 1.0421193838119507, "learning_rate": 5.769e-06, "loss": 0.2304, "step": 1926 }, { "epoch": 10.828169014084507, "grad_norm": 1.3847806453704834, "learning_rate": 5.772e-06, "loss": 0.2422, "step": 1927 }, { "epoch": 10.83380281690141, "grad_norm": 1.4541956186294556, "learning_rate": 5.775000000000001e-06, "loss": 0.2209, "step": 1928 }, { "epoch": 10.83943661971831, "grad_norm": 1.1892116069793701, "learning_rate": 5.7779999999999996e-06, "loss": 0.2024, "step": 1929 }, { "epoch": 10.845070422535212, "grad_norm": 0.8462105989456177, "learning_rate": 5.781e-06, "loss": 0.1899, "step": 1930 }, { "epoch": 10.850704225352112, "grad_norm": 1.1286085844039917, "learning_rate": 5.784e-06, "loss": 0.2412, "step": 1931 }, { "epoch": 10.856338028169015, "grad_norm": 1.1305465698242188, "learning_rate": 5.787e-06, "loss": 0.1794, "step": 1932 }, { "epoch": 10.861971830985915, "grad_norm": 1.3042500019073486, "learning_rate": 5.7900000000000005e-06, "loss": 0.2348, "step": 1933 }, { "epoch": 10.867605633802818, "grad_norm": 1.1313916444778442, "learning_rate": 5.793e-06, "loss": 0.1958, "step": 1934 }, { "epoch": 10.873239436619718, "grad_norm": 1.133272409439087, "learning_rate": 5.796e-06, "loss": 0.1899, "step": 1935 }, { "epoch": 10.87887323943662, "grad_norm": 1.2506299018859863, "learning_rate": 5.799e-06, "loss": 0.1472, "step": 1936 }, { "epoch": 10.88450704225352, "grad_norm": 1.594029426574707, "learning_rate": 5.802e-06, "loss": 0.1941, "step": 1937 }, { "epoch": 10.890140845070423, "grad_norm": 1.5919842720031738, "learning_rate": 5.805e-06, "loss": 0.294, "step": 1938 }, { "epoch": 
10.895774647887324, "grad_norm": 1.4221080541610718, "learning_rate": 5.808e-06, "loss": 0.1845, "step": 1939 }, { "epoch": 10.901408450704226, "grad_norm": 1.1703732013702393, "learning_rate": 5.811000000000001e-06, "loss": 0.1593, "step": 1940 }, { "epoch": 10.907042253521126, "grad_norm": 1.7795172929763794, "learning_rate": 5.814e-06, "loss": 0.1497, "step": 1941 }, { "epoch": 10.912676056338029, "grad_norm": 2.0907723903656006, "learning_rate": 5.8169999999999995e-06, "loss": 0.3258, "step": 1942 }, { "epoch": 10.91830985915493, "grad_norm": 1.3864856958389282, "learning_rate": 5.82e-06, "loss": 0.2019, "step": 1943 }, { "epoch": 10.923943661971832, "grad_norm": 1.2680069208145142, "learning_rate": 5.823e-06, "loss": 0.1822, "step": 1944 }, { "epoch": 10.929577464788732, "grad_norm": 1.6072678565979004, "learning_rate": 5.826000000000001e-06, "loss": 0.1808, "step": 1945 }, { "epoch": 10.935211267605634, "grad_norm": 1.5240446329116821, "learning_rate": 5.8290000000000004e-06, "loss": 0.2065, "step": 1946 }, { "epoch": 10.940845070422535, "grad_norm": 1.2194558382034302, "learning_rate": 5.832e-06, "loss": 0.1115, "step": 1947 }, { "epoch": 10.946478873239437, "grad_norm": 1.7684063911437988, "learning_rate": 5.835e-06, "loss": 0.1561, "step": 1948 }, { "epoch": 10.952112676056338, "grad_norm": 1.389906644821167, "learning_rate": 5.838e-06, "loss": 0.2175, "step": 1949 }, { "epoch": 10.95774647887324, "grad_norm": 1.2007676362991333, "learning_rate": 5.8410000000000005e-06, "loss": 0.1163, "step": 1950 }, { "epoch": 10.96338028169014, "grad_norm": 1.4932256937026978, "learning_rate": 5.844e-06, "loss": 0.1845, "step": 1951 }, { "epoch": 10.969014084507043, "grad_norm": 1.8414969444274902, "learning_rate": 5.847e-06, "loss": 0.1222, "step": 1952 }, { "epoch": 10.974647887323943, "grad_norm": 3.3165552616119385, "learning_rate": 5.850000000000001e-06, "loss": 0.1235, "step": 1953 }, { "epoch": 10.980281690140846, "grad_norm": 1.569212555885315, "learning_rate": 
5.853e-06, "loss": 0.114, "step": 1954 }, { "epoch": 10.985915492957746, "grad_norm": 5.381613731384277, "learning_rate": 5.856e-06, "loss": 0.2605, "step": 1955 }, { "epoch": 10.991549295774648, "grad_norm": 1.5098671913146973, "learning_rate": 5.859e-06, "loss": 0.1392, "step": 1956 }, { "epoch": 10.997183098591549, "grad_norm": 1.2699979543685913, "learning_rate": 5.862e-06, "loss": 0.2562, "step": 1957 }, { "epoch": 11.0, "grad_norm": 0.7006288766860962, "learning_rate": 5.865000000000001e-06, "loss": 0.0416, "step": 1958 }, { "epoch": 11.005633802816902, "grad_norm": 1.9398581981658936, "learning_rate": 5.868e-06, "loss": 0.4747, "step": 1959 }, { "epoch": 11.011267605633803, "grad_norm": 1.516202688217163, "learning_rate": 5.871e-06, "loss": 0.3745, "step": 1960 }, { "epoch": 11.016901408450705, "grad_norm": 1.3685097694396973, "learning_rate": 5.874e-06, "loss": 0.3628, "step": 1961 }, { "epoch": 11.022535211267606, "grad_norm": 1.2770339250564575, "learning_rate": 5.877e-06, "loss": 0.3395, "step": 1962 }, { "epoch": 11.028169014084508, "grad_norm": 1.1790883541107178, "learning_rate": 5.8800000000000005e-06, "loss": 0.2992, "step": 1963 }, { "epoch": 11.033802816901408, "grad_norm": 1.3445945978164673, "learning_rate": 5.883e-06, "loss": 0.3073, "step": 1964 }, { "epoch": 11.03943661971831, "grad_norm": 1.3977150917053223, "learning_rate": 5.886000000000001e-06, "loss": 0.2665, "step": 1965 }, { "epoch": 11.045070422535211, "grad_norm": 1.085282564163208, "learning_rate": 5.889e-06, "loss": 0.3162, "step": 1966 }, { "epoch": 11.050704225352113, "grad_norm": 1.1160662174224854, "learning_rate": 5.892e-06, "loss": 0.3697, "step": 1967 }, { "epoch": 11.056338028169014, "grad_norm": 1.3317656517028809, "learning_rate": 5.895e-06, "loss": 0.2594, "step": 1968 }, { "epoch": 11.061971830985916, "grad_norm": 2.198195219039917, "learning_rate": 5.898e-06, "loss": 0.2783, "step": 1969 }, { "epoch": 11.067605633802817, "grad_norm": 1.0407682657241821, 
"learning_rate": 5.901000000000001e-06, "loss": 0.2395, "step": 1970 }, { "epoch": 11.073239436619719, "grad_norm": 2.153913736343384, "learning_rate": 5.9040000000000006e-06, "loss": 0.2111, "step": 1971 }, { "epoch": 11.07887323943662, "grad_norm": 1.362592101097107, "learning_rate": 5.9069999999999995e-06, "loss": 0.3087, "step": 1972 }, { "epoch": 11.084507042253522, "grad_norm": 1.2153325080871582, "learning_rate": 5.91e-06, "loss": 0.2409, "step": 1973 }, { "epoch": 11.090140845070422, "grad_norm": 3.34124755859375, "learning_rate": 5.913e-06, "loss": 0.2754, "step": 1974 }, { "epoch": 11.095774647887325, "grad_norm": 1.0153306722640991, "learning_rate": 5.916e-06, "loss": 0.2235, "step": 1975 }, { "epoch": 11.101408450704225, "grad_norm": 1.0891366004943848, "learning_rate": 5.919e-06, "loss": 0.2163, "step": 1976 }, { "epoch": 11.107042253521128, "grad_norm": 0.8543951511383057, "learning_rate": 5.922e-06, "loss": 0.1697, "step": 1977 }, { "epoch": 11.112676056338028, "grad_norm": 1.1735453605651855, "learning_rate": 5.925e-06, "loss": 0.2115, "step": 1978 }, { "epoch": 11.11830985915493, "grad_norm": 1.6706689596176147, "learning_rate": 5.928e-06, "loss": 0.1874, "step": 1979 }, { "epoch": 11.12394366197183, "grad_norm": 1.1694936752319336, "learning_rate": 5.931e-06, "loss": 0.1459, "step": 1980 }, { "epoch": 11.129577464788733, "grad_norm": 1.6189069747924805, "learning_rate": 5.934e-06, "loss": 0.2427, "step": 1981 }, { "epoch": 11.135211267605634, "grad_norm": 1.014825701713562, "learning_rate": 5.937e-06, "loss": 0.1619, "step": 1982 }, { "epoch": 11.140845070422536, "grad_norm": 1.4160979986190796, "learning_rate": 5.940000000000001e-06, "loss": 0.1785, "step": 1983 }, { "epoch": 11.146478873239436, "grad_norm": 1.2081398963928223, "learning_rate": 5.943e-06, "loss": 0.1776, "step": 1984 }, { "epoch": 11.152112676056339, "grad_norm": 1.2118706703186035, "learning_rate": 5.9459999999999995e-06, "loss": 0.1713, "step": 1985 }, { "epoch": 
11.15774647887324, "grad_norm": 1.34224534034729, "learning_rate": 5.949e-06, "loss": 0.1718, "step": 1986 }, { "epoch": 11.163380281690142, "grad_norm": 1.0318557024002075, "learning_rate": 5.952e-06, "loss": 0.1364, "step": 1987 }, { "epoch": 11.169014084507042, "grad_norm": 1.7388615608215332, "learning_rate": 5.955000000000001e-06, "loss": 0.1452, "step": 1988 }, { "epoch": 11.174647887323944, "grad_norm": 1.2412067651748657, "learning_rate": 5.958e-06, "loss": 0.1842, "step": 1989 }, { "epoch": 11.180281690140845, "grad_norm": 1.0759446620941162, "learning_rate": 5.961e-06, "loss": 0.1182, "step": 1990 }, { "epoch": 11.185915492957747, "grad_norm": 1.525604248046875, "learning_rate": 5.964e-06, "loss": 0.1884, "step": 1991 }, { "epoch": 11.191549295774648, "grad_norm": 0.858163595199585, "learning_rate": 5.967e-06, "loss": 0.1402, "step": 1992 }, { "epoch": 11.19718309859155, "grad_norm": 0.8793714046478271, "learning_rate": 5.9700000000000004e-06, "loss": 0.1482, "step": 1993 }, { "epoch": 11.20281690140845, "grad_norm": 1.117171287536621, "learning_rate": 5.973e-06, "loss": 0.1722, "step": 1994 }, { "epoch": 11.208450704225353, "grad_norm": 1.9710646867752075, "learning_rate": 5.976e-06, "loss": 0.1752, "step": 1995 }, { "epoch": 11.214084507042253, "grad_norm": 1.0478922128677368, "learning_rate": 5.979000000000001e-06, "loss": 0.1148, "step": 1996 }, { "epoch": 11.219718309859156, "grad_norm": 0.8966274261474609, "learning_rate": 5.982e-06, "loss": 0.1181, "step": 1997 }, { "epoch": 11.225352112676056, "grad_norm": 2.336026191711426, "learning_rate": 5.985e-06, "loss": 0.1721, "step": 1998 }, { "epoch": 11.230985915492958, "grad_norm": 5.311186790466309, "learning_rate": 5.988e-06, "loss": 0.1358, "step": 1999 }, { "epoch": 11.236619718309859, "grad_norm": 1.0921125411987305, "learning_rate": 5.991e-06, "loss": 0.1207, "step": 2000 }, { "epoch": 11.236619718309859, "eval_cer": 0.1341472898359126, "eval_loss": 0.4130030572414398, "eval_runtime": 16.1071, 
"eval_samples_per_second": 18.874, "eval_steps_per_second": 0.621, "eval_wer": 0.47966231772831924, "step": 2000 }, { "epoch": 11.242253521126761, "grad_norm": 1.1482163667678833, "learning_rate": 5.9940000000000005e-06, "loss": 0.1543, "step": 2001 }, { "epoch": 11.247887323943662, "grad_norm": 1.5860090255737305, "learning_rate": 5.997e-06, "loss": 0.1386, "step": 2002 }, { "epoch": 11.253521126760564, "grad_norm": 1.6670833826065063, "learning_rate": 6e-06, "loss": 0.3837, "step": 2003 }, { "epoch": 11.259154929577464, "grad_norm": 1.2291259765625, "learning_rate": 6.003e-06, "loss": 0.319, "step": 2004 }, { "epoch": 11.264788732394367, "grad_norm": 1.5146945714950562, "learning_rate": 6.006e-06, "loss": 0.4055, "step": 2005 }, { "epoch": 11.270422535211267, "grad_norm": 1.086290717124939, "learning_rate": 6.009e-06, "loss": 0.301, "step": 2006 }, { "epoch": 11.27605633802817, "grad_norm": 1.3568214178085327, "learning_rate": 6.012e-06, "loss": 0.3427, "step": 2007 }, { "epoch": 11.28169014084507, "grad_norm": 1.0548985004425049, "learning_rate": 6.015000000000001e-06, "loss": 0.2872, "step": 2008 }, { "epoch": 11.287323943661972, "grad_norm": 1.0688239336013794, "learning_rate": 6.018e-06, "loss": 0.2375, "step": 2009 }, { "epoch": 11.292957746478873, "grad_norm": 1.0712275505065918, "learning_rate": 6.021e-06, "loss": 0.2934, "step": 2010 }, { "epoch": 11.298591549295775, "grad_norm": 0.9598928689956665, "learning_rate": 6.024e-06, "loss": 0.2436, "step": 2011 }, { "epoch": 11.304225352112676, "grad_norm": 1.1273407936096191, "learning_rate": 6.027e-06, "loss": 0.2729, "step": 2012 }, { "epoch": 11.309859154929578, "grad_norm": 0.9519910216331482, "learning_rate": 6.030000000000001e-06, "loss": 0.2192, "step": 2013 }, { "epoch": 11.315492957746478, "grad_norm": 1.2736563682556152, "learning_rate": 6.0330000000000005e-06, "loss": 0.2132, "step": 2014 }, { "epoch": 11.32112676056338, "grad_norm": 0.9460564851760864, "learning_rate": 6.0359999999999995e-06, 
"loss": 0.2367, "step": 2015 }, { "epoch": 11.326760563380281, "grad_norm": 1.231032133102417, "learning_rate": 6.039e-06, "loss": 0.1619, "step": 2016 }, { "epoch": 11.332394366197184, "grad_norm": 1.2977102994918823, "learning_rate": 6.042e-06, "loss": 0.3107, "step": 2017 }, { "epoch": 11.338028169014084, "grad_norm": 1.140352725982666, "learning_rate": 6.0450000000000006e-06, "loss": 0.2126, "step": 2018 }, { "epoch": 11.343661971830986, "grad_norm": 0.9523410797119141, "learning_rate": 6.048e-06, "loss": 0.1649, "step": 2019 }, { "epoch": 11.349295774647887, "grad_norm": 1.1438651084899902, "learning_rate": 6.051e-06, "loss": 0.2368, "step": 2020 }, { "epoch": 11.35492957746479, "grad_norm": 0.9031943082809448, "learning_rate": 6.054e-06, "loss": 0.1823, "step": 2021 }, { "epoch": 11.36056338028169, "grad_norm": 1.095728874206543, "learning_rate": 6.057e-06, "loss": 0.173, "step": 2022 }, { "epoch": 11.366197183098592, "grad_norm": 0.7604085206985474, "learning_rate": 6.0600000000000004e-06, "loss": 0.1478, "step": 2023 }, { "epoch": 11.371830985915492, "grad_norm": 1.3170181512832642, "learning_rate": 6.063e-06, "loss": 0.2287, "step": 2024 }, { "epoch": 11.377464788732395, "grad_norm": 0.9477013349533081, "learning_rate": 6.066e-06, "loss": 0.1785, "step": 2025 }, { "epoch": 11.383098591549295, "grad_norm": 1.2151979207992554, "learning_rate": 6.069000000000001e-06, "loss": 0.1901, "step": 2026 }, { "epoch": 11.388732394366198, "grad_norm": 1.1453232765197754, "learning_rate": 6.072e-06, "loss": 0.1567, "step": 2027 }, { "epoch": 11.394366197183098, "grad_norm": 1.3555009365081787, "learning_rate": 6.075e-06, "loss": 0.1503, "step": 2028 }, { "epoch": 11.4, "grad_norm": 1.478930950164795, "learning_rate": 6.078e-06, "loss": 0.1783, "step": 2029 }, { "epoch": 11.4056338028169, "grad_norm": 1.3874906301498413, "learning_rate": 6.081e-06, "loss": 0.1279, "step": 2030 }, { "epoch": 11.411267605633803, "grad_norm": 1.1926219463348389, "learning_rate": 
6.0840000000000005e-06, "loss": 0.1505, "step": 2031 }, { "epoch": 11.416901408450704, "grad_norm": 1.5676566362380981, "learning_rate": 6.087e-06, "loss": 0.1982, "step": 2032 }, { "epoch": 11.422535211267606, "grad_norm": 0.9685735702514648, "learning_rate": 6.090000000000001e-06, "loss": 0.1032, "step": 2033 }, { "epoch": 11.428169014084506, "grad_norm": 1.186875581741333, "learning_rate": 6.093e-06, "loss": 0.1831, "step": 2034 }, { "epoch": 11.433802816901409, "grad_norm": 1.1626285314559937, "learning_rate": 6.096e-06, "loss": 0.206, "step": 2035 }, { "epoch": 11.43943661971831, "grad_norm": 1.4601824283599854, "learning_rate": 6.099e-06, "loss": 0.1384, "step": 2036 }, { "epoch": 11.445070422535212, "grad_norm": 4.186441421508789, "learning_rate": 6.102e-06, "loss": 0.1076, "step": 2037 }, { "epoch": 11.450704225352112, "grad_norm": 0.9134076833724976, "learning_rate": 6.105e-06, "loss": 0.125, "step": 2038 }, { "epoch": 11.456338028169014, "grad_norm": 2.9102015495300293, "learning_rate": 6.108000000000001e-06, "loss": 0.1859, "step": 2039 }, { "epoch": 11.461971830985915, "grad_norm": 1.1884194612503052, "learning_rate": 6.111e-06, "loss": 0.1172, "step": 2040 }, { "epoch": 11.467605633802817, "grad_norm": 1.3188227415084839, "learning_rate": 6.114e-06, "loss": 0.1583, "step": 2041 }, { "epoch": 11.473239436619718, "grad_norm": 1.3625084161758423, "learning_rate": 6.117e-06, "loss": 0.1631, "step": 2042 }, { "epoch": 11.47887323943662, "grad_norm": 0.8738672137260437, "learning_rate": 6.12e-06, "loss": 0.104, "step": 2043 }, { "epoch": 11.48450704225352, "grad_norm": 0.9803434014320374, "learning_rate": 6.1230000000000005e-06, "loss": 0.1, "step": 2044 }, { "epoch": 11.490140845070423, "grad_norm": 1.4971891641616821, "learning_rate": 6.126e-06, "loss": 0.1709, "step": 2045 }, { "epoch": 11.495774647887323, "grad_norm": 1.5734926462173462, "learning_rate": 6.129e-06, "loss": 0.1089, "step": 2046 }, { "epoch": 11.501408450704226, "grad_norm": 
1.411415696144104, "learning_rate": 6.132e-06, "loss": 0.3311, "step": 2047 }, { "epoch": 11.507042253521126, "grad_norm": 2.074092388153076, "learning_rate": 6.135e-06, "loss": 0.3767, "step": 2048 }, { "epoch": 11.512676056338028, "grad_norm": 1.3877782821655273, "learning_rate": 6.138e-06, "loss": 0.2841, "step": 2049 }, { "epoch": 11.518309859154929, "grad_norm": 2.5066046714782715, "learning_rate": 6.141e-06, "loss": 0.4104, "step": 2050 }, { "epoch": 11.523943661971831, "grad_norm": 1.4148211479187012, "learning_rate": 6.144000000000001e-06, "loss": 0.3023, "step": 2051 }, { "epoch": 11.529577464788732, "grad_norm": 1.5432934761047363, "learning_rate": 6.147e-06, "loss": 0.3505, "step": 2052 }, { "epoch": 11.535211267605634, "grad_norm": 1.1038669347763062, "learning_rate": 6.1499999999999996e-06, "loss": 0.2478, "step": 2053 }, { "epoch": 11.540845070422534, "grad_norm": 1.4863101243972778, "learning_rate": 6.153e-06, "loss": 0.2994, "step": 2054 }, { "epoch": 11.546478873239437, "grad_norm": 1.748842716217041, "learning_rate": 6.156e-06, "loss": 0.3412, "step": 2055 }, { "epoch": 11.552112676056337, "grad_norm": 1.1960797309875488, "learning_rate": 6.159000000000001e-06, "loss": 0.2285, "step": 2056 }, { "epoch": 11.55774647887324, "grad_norm": 1.1405411958694458, "learning_rate": 6.1620000000000005e-06, "loss": 0.2694, "step": 2057 }, { "epoch": 11.56338028169014, "grad_norm": 1.213073968887329, "learning_rate": 6.164999999999999e-06, "loss": 0.2544, "step": 2058 }, { "epoch": 11.569014084507042, "grad_norm": 0.958168625831604, "learning_rate": 6.168e-06, "loss": 0.1945, "step": 2059 }, { "epoch": 11.574647887323943, "grad_norm": 0.9265267252922058, "learning_rate": 6.171e-06, "loss": 0.2065, "step": 2060 }, { "epoch": 11.580281690140845, "grad_norm": 1.1597423553466797, "learning_rate": 6.1740000000000005e-06, "loss": 0.2979, "step": 2061 }, { "epoch": 11.585915492957746, "grad_norm": 3.3253424167633057, "learning_rate": 6.177e-06, "loss": 0.1645, "step": 
2062 }, { "epoch": 11.591549295774648, "grad_norm": 1.0621004104614258, "learning_rate": 6.18e-06, "loss": 0.1682, "step": 2063 }, { "epoch": 11.597183098591549, "grad_norm": 0.8203555345535278, "learning_rate": 6.183e-06, "loss": 0.2052, "step": 2064 }, { "epoch": 11.60281690140845, "grad_norm": 1.1918123960494995, "learning_rate": 6.186e-06, "loss": 0.2594, "step": 2065 }, { "epoch": 11.608450704225351, "grad_norm": 0.9690381288528442, "learning_rate": 6.189e-06, "loss": 0.1555, "step": 2066 }, { "epoch": 11.614084507042254, "grad_norm": 0.8412011861801147, "learning_rate": 6.192e-06, "loss": 0.183, "step": 2067 }, { "epoch": 11.619718309859154, "grad_norm": 1.1462820768356323, "learning_rate": 6.195e-06, "loss": 0.2564, "step": 2068 }, { "epoch": 11.625352112676056, "grad_norm": 1.3820849657058716, "learning_rate": 6.198000000000001e-06, "loss": 0.2422, "step": 2069 }, { "epoch": 11.630985915492957, "grad_norm": 1.207099199295044, "learning_rate": 6.201e-06, "loss": 0.2237, "step": 2070 }, { "epoch": 11.63661971830986, "grad_norm": 1.4738413095474243, "learning_rate": 6.204e-06, "loss": 0.1946, "step": 2071 }, { "epoch": 11.642253521126761, "grad_norm": 1.1953233480453491, "learning_rate": 6.207e-06, "loss": 0.2006, "step": 2072 }, { "epoch": 11.647887323943662, "grad_norm": 1.0370465517044067, "learning_rate": 6.21e-06, "loss": 0.1726, "step": 2073 }, { "epoch": 11.653521126760563, "grad_norm": 0.964539110660553, "learning_rate": 6.2130000000000005e-06, "loss": 0.1297, "step": 2074 }, { "epoch": 11.659154929577465, "grad_norm": 1.2024736404418945, "learning_rate": 6.216e-06, "loss": 0.1609, "step": 2075 }, { "epoch": 11.664788732394367, "grad_norm": 1.0568267107009888, "learning_rate": 6.219000000000001e-06, "loss": 0.1349, "step": 2076 }, { "epoch": 11.670422535211268, "grad_norm": 1.3605934381484985, "learning_rate": 6.222e-06, "loss": 0.1397, "step": 2077 }, { "epoch": 11.676056338028168, "grad_norm": 1.7064049243927002, "learning_rate": 6.225e-06, "loss": 
0.2706, "step": 2078 }, { "epoch": 11.68169014084507, "grad_norm": 1.5377662181854248, "learning_rate": 6.228e-06, "loss": 0.1744, "step": 2079 }, { "epoch": 11.687323943661973, "grad_norm": 1.0948774814605713, "learning_rate": 6.231e-06, "loss": 0.1446, "step": 2080 }, { "epoch": 11.692957746478873, "grad_norm": 1.203718662261963, "learning_rate": 6.234000000000001e-06, "loss": 0.1324, "step": 2081 }, { "epoch": 11.698591549295774, "grad_norm": 1.2526240348815918, "learning_rate": 6.237000000000001e-06, "loss": 0.1458, "step": 2082 }, { "epoch": 11.704225352112676, "grad_norm": 1.3583401441574097, "learning_rate": 6.2399999999999995e-06, "loss": 0.1244, "step": 2083 }, { "epoch": 11.709859154929578, "grad_norm": 1.1355618238449097, "learning_rate": 6.243e-06, "loss": 0.1412, "step": 2084 }, { "epoch": 11.715492957746479, "grad_norm": 0.9804529547691345, "learning_rate": 6.246e-06, "loss": 0.0947, "step": 2085 }, { "epoch": 11.721126760563381, "grad_norm": 1.946309208869934, "learning_rate": 6.249000000000001e-06, "loss": 0.137, "step": 2086 }, { "epoch": 11.726760563380282, "grad_norm": 1.0956335067749023, "learning_rate": 6.2520000000000004e-06, "loss": 0.1365, "step": 2087 }, { "epoch": 11.732394366197184, "grad_norm": 1.726019024848938, "learning_rate": 6.255e-06, "loss": 0.1832, "step": 2088 }, { "epoch": 11.738028169014084, "grad_norm": 1.162100911140442, "learning_rate": 6.258e-06, "loss": 0.0957, "step": 2089 }, { "epoch": 11.743661971830987, "grad_norm": 6.184539794921875, "learning_rate": 6.261e-06, "loss": 0.126, "step": 2090 }, { "epoch": 11.749295774647887, "grad_norm": 2.751922369003296, "learning_rate": 6.2640000000000005e-06, "loss": 0.3792, "step": 2091 }, { "epoch": 11.75492957746479, "grad_norm": 1.61319100856781, "learning_rate": 6.267e-06, "loss": 0.3769, "step": 2092 }, { "epoch": 11.76056338028169, "grad_norm": 1.275633692741394, "learning_rate": 6.27e-06, "loss": 0.324, "step": 2093 }, { "epoch": 11.766197183098592, "grad_norm": 
1.4428482055664062, "learning_rate": 6.273000000000001e-06, "loss": 0.3415, "step": 2094 }, { "epoch": 11.771830985915493, "grad_norm": 1.5393116474151611, "learning_rate": 6.276e-06, "loss": 0.2746, "step": 2095 }, { "epoch": 11.777464788732395, "grad_norm": 1.4083012342453003, "learning_rate": 6.279e-06, "loss": 0.3227, "step": 2096 }, { "epoch": 11.783098591549296, "grad_norm": 1.6151069402694702, "learning_rate": 6.282e-06, "loss": 0.3148, "step": 2097 }, { "epoch": 11.788732394366198, "grad_norm": 1.1314624547958374, "learning_rate": 6.285e-06, "loss": 0.2601, "step": 2098 }, { "epoch": 11.794366197183098, "grad_norm": 0.9362069964408875, "learning_rate": 6.288000000000001e-06, "loss": 0.1898, "step": 2099 }, { "epoch": 11.8, "grad_norm": 1.412224292755127, "learning_rate": 6.291e-06, "loss": 0.2376, "step": 2100 }, { "epoch": 11.805633802816901, "grad_norm": 1.9823790788650513, "learning_rate": 6.293999999999999e-06, "loss": 0.2844, "step": 2101 }, { "epoch": 11.811267605633804, "grad_norm": 1.1441813707351685, "learning_rate": 6.297e-06, "loss": 0.2702, "step": 2102 }, { "epoch": 11.816901408450704, "grad_norm": 1.573545217514038, "learning_rate": 6.3e-06, "loss": 0.2612, "step": 2103 }, { "epoch": 11.822535211267606, "grad_norm": 1.273963451385498, "learning_rate": 6.3030000000000005e-06, "loss": 0.3383, "step": 2104 }, { "epoch": 11.828169014084507, "grad_norm": 1.3633432388305664, "learning_rate": 6.306e-06, "loss": 0.204, "step": 2105 }, { "epoch": 11.83380281690141, "grad_norm": 0.9509096741676331, "learning_rate": 6.309e-06, "loss": 0.215, "step": 2106 }, { "epoch": 11.83943661971831, "grad_norm": 1.4638032913208008, "learning_rate": 6.312e-06, "loss": 0.2364, "step": 2107 }, { "epoch": 11.845070422535212, "grad_norm": 1.0630970001220703, "learning_rate": 6.315e-06, "loss": 0.1686, "step": 2108 }, { "epoch": 11.850704225352112, "grad_norm": 1.570688247680664, "learning_rate": 6.318e-06, "loss": 0.2201, "step": 2109 }, { "epoch": 11.856338028169015, 
"grad_norm": 1.1252381801605225, "learning_rate": 6.321e-06, "loss": 0.1689, "step": 2110 }, { "epoch": 11.861971830985915, "grad_norm": 1.6795021295547485, "learning_rate": 6.324e-06, "loss": 0.2043, "step": 2111 }, { "epoch": 11.867605633802818, "grad_norm": 1.2105810642242432, "learning_rate": 6.327000000000001e-06, "loss": 0.2344, "step": 2112 }, { "epoch": 11.873239436619718, "grad_norm": 1.7162758111953735, "learning_rate": 6.3299999999999995e-06, "loss": 0.1711, "step": 2113 }, { "epoch": 11.87887323943662, "grad_norm": 1.320256233215332, "learning_rate": 6.333e-06, "loss": 0.1619, "step": 2114 }, { "epoch": 11.88450704225352, "grad_norm": 0.8782104849815369, "learning_rate": 6.336e-06, "loss": 0.1252, "step": 2115 }, { "epoch": 11.890140845070423, "grad_norm": 0.9613125920295715, "learning_rate": 6.339e-06, "loss": 0.1573, "step": 2116 }, { "epoch": 11.895774647887324, "grad_norm": 1.6380764245986938, "learning_rate": 6.3420000000000004e-06, "loss": 0.1645, "step": 2117 }, { "epoch": 11.901408450704226, "grad_norm": 1.398338794708252, "learning_rate": 6.345e-06, "loss": 0.1584, "step": 2118 }, { "epoch": 11.907042253521126, "grad_norm": 0.7959722876548767, "learning_rate": 6.348000000000001e-06, "loss": 0.1237, "step": 2119 }, { "epoch": 11.912676056338029, "grad_norm": 1.6053969860076904, "learning_rate": 6.351e-06, "loss": 0.2548, "step": 2120 }, { "epoch": 11.91830985915493, "grad_norm": 0.8624702095985413, "learning_rate": 6.354e-06, "loss": 0.1198, "step": 2121 }, { "epoch": 11.923943661971832, "grad_norm": 1.7833203077316284, "learning_rate": 6.357e-06, "loss": 0.1543, "step": 2122 }, { "epoch": 11.929577464788732, "grad_norm": 1.2012407779693604, "learning_rate": 6.36e-06, "loss": 0.2014, "step": 2123 }, { "epoch": 11.935211267605634, "grad_norm": 1.1739535331726074, "learning_rate": 6.363000000000001e-06, "loss": 0.1549, "step": 2124 }, { "epoch": 11.940845070422535, "grad_norm": 0.9087754487991333, "learning_rate": 6.3660000000000005e-06, "loss": 
0.1677, "step": 2125 }, { "epoch": 11.946478873239437, "grad_norm": 1.2092058658599854, "learning_rate": 6.3689999999999995e-06, "loss": 0.2086, "step": 2126 }, { "epoch": 11.952112676056338, "grad_norm": 1.0387578010559082, "learning_rate": 6.372e-06, "loss": 0.1549, "step": 2127 }, { "epoch": 11.95774647887324, "grad_norm": 0.8306543231010437, "learning_rate": 6.375e-06, "loss": 0.1099, "step": 2128 }, { "epoch": 11.96338028169014, "grad_norm": 0.9887404441833496, "learning_rate": 6.378000000000001e-06, "loss": 0.1782, "step": 2129 }, { "epoch": 11.969014084507043, "grad_norm": 1.492296576499939, "learning_rate": 6.381e-06, "loss": 0.1891, "step": 2130 }, { "epoch": 11.974647887323943, "grad_norm": 0.9206104874610901, "learning_rate": 6.384e-06, "loss": 0.0941, "step": 2131 }, { "epoch": 11.980281690140846, "grad_norm": 1.4595625400543213, "learning_rate": 6.387e-06, "loss": 0.1157, "step": 2132 }, { "epoch": 11.985915492957746, "grad_norm": 1.6814322471618652, "learning_rate": 6.39e-06, "loss": 0.1312, "step": 2133 }, { "epoch": 11.991549295774648, "grad_norm": 1.415077567100525, "learning_rate": 6.3930000000000005e-06, "loss": 0.1203, "step": 2134 }, { "epoch": 11.997183098591549, "grad_norm": 2.1616263389587402, "learning_rate": 6.396e-06, "loss": 0.3054, "step": 2135 }, { "epoch": 12.0, "grad_norm": 0.8917145133018494, "learning_rate": 6.399e-06, "loss": 0.093, "step": 2136 }, { "epoch": 12.005633802816902, "grad_norm": 1.6984648704528809, "learning_rate": 6.402000000000001e-06, "loss": 0.3949, "step": 2137 }, { "epoch": 12.011267605633803, "grad_norm": 1.4069889783859253, "learning_rate": 6.405e-06, "loss": 0.3864, "step": 2138 }, { "epoch": 12.016901408450705, "grad_norm": 1.416674017906189, "learning_rate": 6.408e-06, "loss": 0.3177, "step": 2139 }, { "epoch": 12.022535211267606, "grad_norm": 1.3459945917129517, "learning_rate": 6.411e-06, "loss": 0.3015, "step": 2140 }, { "epoch": 12.028169014084508, "grad_norm": 1.0046989917755127, "learning_rate": 
6.414e-06, "loss": 0.2849, "step": 2141 }, { "epoch": 12.033802816901408, "grad_norm": 1.974661946296692, "learning_rate": 6.4170000000000006e-06, "loss": 0.2922, "step": 2142 }, { "epoch": 12.03943661971831, "grad_norm": 1.2300359010696411, "learning_rate": 6.42e-06, "loss": 0.253, "step": 2143 }, { "epoch": 12.045070422535211, "grad_norm": 1.2212858200073242, "learning_rate": 6.423e-06, "loss": 0.2342, "step": 2144 }, { "epoch": 12.050704225352113, "grad_norm": 0.8887746334075928, "learning_rate": 6.426e-06, "loss": 0.2426, "step": 2145 }, { "epoch": 12.056338028169014, "grad_norm": 1.4800262451171875, "learning_rate": 6.429e-06, "loss": 0.3353, "step": 2146 }, { "epoch": 12.061971830985916, "grad_norm": 1.019694209098816, "learning_rate": 6.432e-06, "loss": 0.2111, "step": 2147 }, { "epoch": 12.067605633802817, "grad_norm": 0.9660020470619202, "learning_rate": 6.435e-06, "loss": 0.2343, "step": 2148 }, { "epoch": 12.073239436619719, "grad_norm": 1.9195542335510254, "learning_rate": 6.438000000000001e-06, "loss": 0.3349, "step": 2149 }, { "epoch": 12.07887323943662, "grad_norm": 0.7935559749603271, "learning_rate": 6.441e-06, "loss": 0.1654, "step": 2150 }, { "epoch": 12.084507042253522, "grad_norm": 1.432407259941101, "learning_rate": 6.444e-06, "loss": 0.2491, "step": 2151 }, { "epoch": 12.090140845070422, "grad_norm": 0.9607966542243958, "learning_rate": 6.447e-06, "loss": 0.1831, "step": 2152 }, { "epoch": 12.095774647887325, "grad_norm": 10.865976333618164, "learning_rate": 6.45e-06, "loss": 0.2031, "step": 2153 }, { "epoch": 12.101408450704225, "grad_norm": 0.9519309997558594, "learning_rate": 6.453000000000001e-06, "loss": 0.1846, "step": 2154 }, { "epoch": 12.107042253521128, "grad_norm": 1.1624133586883545, "learning_rate": 6.4560000000000005e-06, "loss": 0.1906, "step": 2155 }, { "epoch": 12.112676056338028, "grad_norm": 1.657770037651062, "learning_rate": 6.4589999999999995e-06, "loss": 0.1891, "step": 2156 }, { "epoch": 12.11830985915493, "grad_norm": 
2.0160698890686035, "learning_rate": 6.462e-06, "loss": 0.171, "step": 2157 }, { "epoch": 12.12394366197183, "grad_norm": 1.1082218885421753, "learning_rate": 6.465e-06, "loss": 0.1412, "step": 2158 }, { "epoch": 12.129577464788733, "grad_norm": 1.2455134391784668, "learning_rate": 6.468000000000001e-06, "loss": 0.1628, "step": 2159 }, { "epoch": 12.135211267605634, "grad_norm": 1.1360150575637817, "learning_rate": 6.471e-06, "loss": 0.1598, "step": 2160 }, { "epoch": 12.140845070422536, "grad_norm": 1.3835012912750244, "learning_rate": 6.474e-06, "loss": 0.2295, "step": 2161 }, { "epoch": 12.146478873239436, "grad_norm": 0.7931525111198425, "learning_rate": 6.477000000000001e-06, "loss": 0.1427, "step": 2162 }, { "epoch": 12.152112676056339, "grad_norm": 1.334067702293396, "learning_rate": 6.48e-06, "loss": 0.1525, "step": 2163 }, { "epoch": 12.15774647887324, "grad_norm": 0.7614358067512512, "learning_rate": 6.483e-06, "loss": 0.1347, "step": 2164 }, { "epoch": 12.163380281690142, "grad_norm": 1.285995364189148, "learning_rate": 6.486e-06, "loss": 0.1829, "step": 2165 }, { "epoch": 12.169014084507042, "grad_norm": 1.0123273134231567, "learning_rate": 6.489e-06, "loss": 0.1541, "step": 2166 }, { "epoch": 12.174647887323944, "grad_norm": 0.6961002945899963, "learning_rate": 6.492000000000001e-06, "loss": 0.1341, "step": 2167 }, { "epoch": 12.180281690140845, "grad_norm": 1.2021476030349731, "learning_rate": 6.4950000000000005e-06, "loss": 0.1654, "step": 2168 }, { "epoch": 12.185915492957747, "grad_norm": 0.9861935377120972, "learning_rate": 6.4979999999999994e-06, "loss": 0.1409, "step": 2169 }, { "epoch": 12.191549295774648, "grad_norm": 0.9621484279632568, "learning_rate": 6.501e-06, "loss": 0.1587, "step": 2170 }, { "epoch": 12.19718309859155, "grad_norm": 0.9368581175804138, "learning_rate": 6.504e-06, "loss": 0.1047, "step": 2171 }, { "epoch": 12.20281690140845, "grad_norm": 0.9132133722305298, "learning_rate": 6.5070000000000005e-06, "loss": 0.1071, "step": 
2172 }, { "epoch": 12.208450704225353, "grad_norm": 1.285211205482483, "learning_rate": 6.51e-06, "loss": 0.134, "step": 2173 }, { "epoch": 12.214084507042253, "grad_norm": 0.9588539004325867, "learning_rate": 6.513e-06, "loss": 0.0964, "step": 2174 }, { "epoch": 12.219718309859156, "grad_norm": 1.968137502670288, "learning_rate": 6.516e-06, "loss": 0.203, "step": 2175 }, { "epoch": 12.225352112676056, "grad_norm": 1.5773708820343018, "learning_rate": 6.519e-06, "loss": 0.1672, "step": 2176 }, { "epoch": 12.230985915492958, "grad_norm": 1.0188342332839966, "learning_rate": 6.522e-06, "loss": 0.1183, "step": 2177 }, { "epoch": 12.236619718309859, "grad_norm": 1.2908991575241089, "learning_rate": 6.525e-06, "loss": 0.094, "step": 2178 }, { "epoch": 12.242253521126761, "grad_norm": 1.6380032300949097, "learning_rate": 6.528e-06, "loss": 0.135, "step": 2179 }, { "epoch": 12.247887323943662, "grad_norm": 1.0029958486557007, "learning_rate": 6.531000000000001e-06, "loss": 0.1104, "step": 2180 }, { "epoch": 12.253521126760564, "grad_norm": 1.6804022789001465, "learning_rate": 6.534e-06, "loss": 0.3099, "step": 2181 }, { "epoch": 12.259154929577464, "grad_norm": 1.4036184549331665, "learning_rate": 6.537e-06, "loss": 0.287, "step": 2182 }, { "epoch": 12.264788732394367, "grad_norm": 1.4057486057281494, "learning_rate": 6.54e-06, "loss": 0.3291, "step": 2183 }, { "epoch": 12.270422535211267, "grad_norm": 1.5506089925765991, "learning_rate": 6.543e-06, "loss": 0.3328, "step": 2184 }, { "epoch": 12.27605633802817, "grad_norm": 1.1244016885757446, "learning_rate": 6.5460000000000005e-06, "loss": 0.2848, "step": 2185 }, { "epoch": 12.28169014084507, "grad_norm": 1.2392929792404175, "learning_rate": 6.549e-06, "loss": 0.2391, "step": 2186 }, { "epoch": 12.287323943661972, "grad_norm": 1.30363130569458, "learning_rate": 6.552e-06, "loss": 0.2333, "step": 2187 }, { "epoch": 12.292957746478873, "grad_norm": 1.0746246576309204, "learning_rate": 6.555e-06, "loss": 0.2439, "step": 
2188 }, { "epoch": 12.298591549295775, "grad_norm": 1.0101085901260376, "learning_rate": 6.558e-06, "loss": 0.2396, "step": 2189 }, { "epoch": 12.304225352112676, "grad_norm": 1.133170247077942, "learning_rate": 6.561e-06, "loss": 0.201, "step": 2190 }, { "epoch": 12.309859154929578, "grad_norm": 2.069068431854248, "learning_rate": 6.564e-06, "loss": 0.2319, "step": 2191 }, { "epoch": 12.315492957746478, "grad_norm": 1.1653518676757812, "learning_rate": 6.567000000000001e-06, "loss": 0.1979, "step": 2192 }, { "epoch": 12.32112676056338, "grad_norm": 1.6685208082199097, "learning_rate": 6.57e-06, "loss": 0.2719, "step": 2193 }, { "epoch": 12.326760563380281, "grad_norm": 1.3503191471099854, "learning_rate": 6.573e-06, "loss": 0.2121, "step": 2194 }, { "epoch": 12.332394366197184, "grad_norm": 1.197085976600647, "learning_rate": 6.576e-06, "loss": 0.2611, "step": 2195 }, { "epoch": 12.338028169014084, "grad_norm": 1.3264793157577515, "learning_rate": 6.579e-06, "loss": 0.2241, "step": 2196 }, { "epoch": 12.343661971830986, "grad_norm": 1.014402151107788, "learning_rate": 6.582000000000001e-06, "loss": 0.1743, "step": 2197 }, { "epoch": 12.349295774647887, "grad_norm": 1.0553693771362305, "learning_rate": 6.5850000000000005e-06, "loss": 0.1957, "step": 2198 }, { "epoch": 12.35492957746479, "grad_norm": 0.8102869987487793, "learning_rate": 6.5879999999999994e-06, "loss": 0.1246, "step": 2199 }, { "epoch": 12.36056338028169, "grad_norm": 1.0937633514404297, "learning_rate": 6.591e-06, "loss": 0.1866, "step": 2200 }, { "epoch": 12.366197183098592, "grad_norm": 1.4748212099075317, "learning_rate": 6.594e-06, "loss": 0.2217, "step": 2201 }, { "epoch": 12.371830985915492, "grad_norm": 1.4327399730682373, "learning_rate": 6.5970000000000005e-06, "loss": 0.1948, "step": 2202 }, { "epoch": 12.377464788732395, "grad_norm": 1.7146328687667847, "learning_rate": 6.6e-06, "loss": 0.2713, "step": 2203 }, { "epoch": 12.383098591549295, "grad_norm": 1.0207703113555908, 
"learning_rate": 6.603e-06, "loss": 0.144, "step": 2204 }, { "epoch": 12.388732394366198, "grad_norm": 1.075111746788025, "learning_rate": 6.606000000000001e-06, "loss": 0.1257, "step": 2205 }, { "epoch": 12.394366197183098, "grad_norm": 1.2496787309646606, "learning_rate": 6.609e-06, "loss": 0.1533, "step": 2206 }, { "epoch": 12.4, "grad_norm": 1.3731290102005005, "learning_rate": 6.612e-06, "loss": 0.194, "step": 2207 }, { "epoch": 12.4056338028169, "grad_norm": 1.7958683967590332, "learning_rate": 6.615e-06, "loss": 0.1537, "step": 2208 }, { "epoch": 12.411267605633803, "grad_norm": 0.8508431911468506, "learning_rate": 6.618e-06, "loss": 0.1086, "step": 2209 }, { "epoch": 12.416901408450704, "grad_norm": 0.898347795009613, "learning_rate": 6.621000000000001e-06, "loss": 0.1322, "step": 2210 }, { "epoch": 12.422535211267606, "grad_norm": 1.1445592641830444, "learning_rate": 6.6240000000000004e-06, "loss": 0.117, "step": 2211 }, { "epoch": 12.428169014084506, "grad_norm": 0.8736822605133057, "learning_rate": 6.627e-06, "loss": 0.1003, "step": 2212 }, { "epoch": 12.433802816901409, "grad_norm": 1.9266570806503296, "learning_rate": 6.63e-06, "loss": 0.3111, "step": 2213 }, { "epoch": 12.43943661971831, "grad_norm": 0.9109316468238831, "learning_rate": 6.633e-06, "loss": 0.1116, "step": 2214 }, { "epoch": 12.445070422535212, "grad_norm": 1.0418117046356201, "learning_rate": 6.6360000000000005e-06, "loss": 0.1196, "step": 2215 }, { "epoch": 12.450704225352112, "grad_norm": 1.6522717475891113, "learning_rate": 6.639e-06, "loss": 0.1692, "step": 2216 }, { "epoch": 12.456338028169014, "grad_norm": 0.7205647230148315, "learning_rate": 6.642000000000001e-06, "loss": 0.1432, "step": 2217 }, { "epoch": 12.461971830985915, "grad_norm": 0.8857249021530151, "learning_rate": 6.645e-06, "loss": 0.1027, "step": 2218 }, { "epoch": 12.467605633802817, "grad_norm": 0.9263817667961121, "learning_rate": 6.648e-06, "loss": 0.1226, "step": 2219 }, { "epoch": 12.473239436619718, 
"grad_norm": 0.98170405626297, "learning_rate": 6.651e-06, "loss": 0.1127, "step": 2220 }, { "epoch": 12.47887323943662, "grad_norm": 1.1023272275924683, "learning_rate": 6.654e-06, "loss": 0.1397, "step": 2221 }, { "epoch": 12.48450704225352, "grad_norm": 1.3979127407073975, "learning_rate": 6.657e-06, "loss": 0.1699, "step": 2222 }, { "epoch": 12.490140845070423, "grad_norm": 1.75055730342865, "learning_rate": 6.660000000000001e-06, "loss": 0.1577, "step": 2223 }, { "epoch": 12.495774647887323, "grad_norm": 0.7592055201530457, "learning_rate": 6.6629999999999996e-06, "loss": 0.0872, "step": 2224 }, { "epoch": 12.501408450704226, "grad_norm": 1.2398818731307983, "learning_rate": 6.666e-06, "loss": 0.4152, "step": 2225 }, { "epoch": 12.507042253521126, "grad_norm": 1.030767560005188, "learning_rate": 6.669e-06, "loss": 0.3784, "step": 2226 }, { "epoch": 12.512676056338028, "grad_norm": 1.109084129333496, "learning_rate": 6.672e-06, "loss": 0.3261, "step": 2227 }, { "epoch": 12.518309859154929, "grad_norm": 0.9165330529212952, "learning_rate": 6.6750000000000005e-06, "loss": 0.3133, "step": 2228 }, { "epoch": 12.523943661971831, "grad_norm": 1.2480812072753906, "learning_rate": 6.678e-06, "loss": 0.3473, "step": 2229 }, { "epoch": 12.529577464788732, "grad_norm": 2.1548471450805664, "learning_rate": 6.681e-06, "loss": 0.3528, "step": 2230 }, { "epoch": 12.535211267605634, "grad_norm": 1.3825241327285767, "learning_rate": 6.684e-06, "loss": 0.3216, "step": 2231 }, { "epoch": 12.540845070422534, "grad_norm": 2.3899247646331787, "learning_rate": 6.687e-06, "loss": 0.3398, "step": 2232 }, { "epoch": 12.546478873239437, "grad_norm": 1.1855460405349731, "learning_rate": 6.69e-06, "loss": 0.3115, "step": 2233 }, { "epoch": 12.552112676056337, "grad_norm": 1.336974859237671, "learning_rate": 6.693e-06, "loss": 0.2651, "step": 2234 }, { "epoch": 12.55774647887324, "grad_norm": 1.1475414037704468, "learning_rate": 6.696000000000001e-06, "loss": 0.2387, "step": 2235 }, { 
"epoch": 12.56338028169014, "grad_norm": 1.01780366897583, "learning_rate": 6.699e-06, "loss": 0.2369, "step": 2236 }, { "epoch": 12.569014084507042, "grad_norm": 1.2727125883102417, "learning_rate": 6.7019999999999995e-06, "loss": 0.2515, "step": 2237 }, { "epoch": 12.574647887323943, "grad_norm": 0.831050455570221, "learning_rate": 6.705e-06, "loss": 0.1449, "step": 2238 }, { "epoch": 12.580281690140845, "grad_norm": 1.0591498613357544, "learning_rate": 6.708e-06, "loss": 0.227, "step": 2239 }, { "epoch": 12.585915492957746, "grad_norm": 1.9926378726959229, "learning_rate": 6.711000000000001e-06, "loss": 0.1934, "step": 2240 }, { "epoch": 12.591549295774648, "grad_norm": 0.8209588527679443, "learning_rate": 6.7140000000000004e-06, "loss": 0.1624, "step": 2241 }, { "epoch": 12.597183098591549, "grad_norm": 1.4897950887680054, "learning_rate": 6.716999999999999e-06, "loss": 0.189, "step": 2242 }, { "epoch": 12.60281690140845, "grad_norm": 1.1313835382461548, "learning_rate": 6.72e-06, "loss": 0.1545, "step": 2243 }, { "epoch": 12.608450704225351, "grad_norm": 0.9980863332748413, "learning_rate": 6.723e-06, "loss": 0.1538, "step": 2244 }, { "epoch": 12.614084507042254, "grad_norm": 0.8531108498573303, "learning_rate": 6.7260000000000005e-06, "loss": 0.1654, "step": 2245 }, { "epoch": 12.619718309859154, "grad_norm": 1.5782039165496826, "learning_rate": 6.729e-06, "loss": 0.1617, "step": 2246 }, { "epoch": 12.625352112676056, "grad_norm": 1.4706830978393555, "learning_rate": 6.732e-06, "loss": 0.1492, "step": 2247 }, { "epoch": 12.630985915492957, "grad_norm": 0.9091673493385315, "learning_rate": 6.735000000000001e-06, "loss": 0.2051, "step": 2248 }, { "epoch": 12.63661971830986, "grad_norm": 1.1926296949386597, "learning_rate": 6.738e-06, "loss": 0.1183, "step": 2249 }, { "epoch": 12.642253521126761, "grad_norm": 1.1605890989303589, "learning_rate": 6.741e-06, "loss": 0.1642, "step": 2250 }, { "epoch": 12.647887323943662, "grad_norm": 2.8890974521636963, 
"learning_rate": 6.744e-06, "loss": 0.1447, "step": 2251 }, { "epoch": 12.653521126760563, "grad_norm": 1.033742070198059, "learning_rate": 6.747e-06, "loss": 0.1296, "step": 2252 }, { "epoch": 12.659154929577465, "grad_norm": 1.0825772285461426, "learning_rate": 6.750000000000001e-06, "loss": 0.1491, "step": 2253 }, { "epoch": 12.664788732394367, "grad_norm": 1.134958267211914, "learning_rate": 6.753e-06, "loss": 0.1175, "step": 2254 }, { "epoch": 12.670422535211268, "grad_norm": 0.6536986827850342, "learning_rate": 6.756e-06, "loss": 0.1017, "step": 2255 }, { "epoch": 12.676056338028168, "grad_norm": 3.814683437347412, "learning_rate": 6.759e-06, "loss": 0.1983, "step": 2256 }, { "epoch": 12.68169014084507, "grad_norm": 1.1226320266723633, "learning_rate": 6.762e-06, "loss": 0.1352, "step": 2257 }, { "epoch": 12.687323943661973, "grad_norm": 1.1591049432754517, "learning_rate": 6.7650000000000005e-06, "loss": 0.145, "step": 2258 }, { "epoch": 12.692957746478873, "grad_norm": 0.8642778992652893, "learning_rate": 6.768e-06, "loss": 0.1138, "step": 2259 }, { "epoch": 12.698591549295774, "grad_norm": 1.2827340364456177, "learning_rate": 6.771000000000001e-06, "loss": 0.1431, "step": 2260 }, { "epoch": 12.704225352112676, "grad_norm": 0.886278510093689, "learning_rate": 6.774e-06, "loss": 0.1109, "step": 2261 }, { "epoch": 12.709859154929578, "grad_norm": 0.9530730247497559, "learning_rate": 6.777e-06, "loss": 0.1372, "step": 2262 }, { "epoch": 12.715492957746479, "grad_norm": 0.9834749698638916, "learning_rate": 6.78e-06, "loss": 0.1539, "step": 2263 }, { "epoch": 12.721126760563381, "grad_norm": 1.6432827711105347, "learning_rate": 6.783e-06, "loss": 0.1165, "step": 2264 }, { "epoch": 12.726760563380282, "grad_norm": 0.9385227560997009, "learning_rate": 6.786000000000001e-06, "loss": 0.1088, "step": 2265 }, { "epoch": 12.732394366197184, "grad_norm": 0.7706801295280457, "learning_rate": 6.7890000000000006e-06, "loss": 0.1038, "step": 2266 }, { "epoch": 
12.738028169014084, "grad_norm": 1.4348809719085693, "learning_rate": 6.7919999999999995e-06, "loss": 0.0924, "step": 2267 }, { "epoch": 12.743661971830987, "grad_norm": 1.3323912620544434, "learning_rate": 6.795e-06, "loss": 0.1399, "step": 2268 }, { "epoch": 12.749295774647887, "grad_norm": 1.2515511512756348, "learning_rate": 6.798e-06, "loss": 0.2773, "step": 2269 }, { "epoch": 12.75492957746479, "grad_norm": 1.3861656188964844, "learning_rate": 6.801000000000001e-06, "loss": 0.3519, "step": 2270 }, { "epoch": 12.76056338028169, "grad_norm": 1.2774341106414795, "learning_rate": 6.804e-06, "loss": 0.2761, "step": 2271 }, { "epoch": 12.766197183098592, "grad_norm": 1.0995910167694092, "learning_rate": 6.807e-06, "loss": 0.3187, "step": 2272 }, { "epoch": 12.771830985915493, "grad_norm": 0.8781499862670898, "learning_rate": 6.81e-06, "loss": 0.2701, "step": 2273 }, { "epoch": 12.777464788732395, "grad_norm": 0.963740885257721, "learning_rate": 6.813e-06, "loss": 0.2492, "step": 2274 }, { "epoch": 12.783098591549296, "grad_norm": 0.8591766953468323, "learning_rate": 6.8160000000000005e-06, "loss": 0.2459, "step": 2275 }, { "epoch": 12.788732394366198, "grad_norm": 1.018838882446289, "learning_rate": 6.819e-06, "loss": 0.2356, "step": 2276 }, { "epoch": 12.794366197183098, "grad_norm": 1.0732933282852173, "learning_rate": 6.822e-06, "loss": 0.2355, "step": 2277 }, { "epoch": 12.8, "grad_norm": 0.8466665744781494, "learning_rate": 6.825000000000001e-06, "loss": 0.1744, "step": 2278 }, { "epoch": 12.805633802816901, "grad_norm": 1.2690716981887817, "learning_rate": 6.828e-06, "loss": 0.214, "step": 2279 }, { "epoch": 12.811267605633804, "grad_norm": 1.5277619361877441, "learning_rate": 6.831e-06, "loss": 0.206, "step": 2280 }, { "epoch": 12.816901408450704, "grad_norm": 0.9415708780288696, "learning_rate": 6.834e-06, "loss": 0.2104, "step": 2281 }, { "epoch": 12.822535211267606, "grad_norm": 1.0906460285186768, "learning_rate": 6.837e-06, "loss": 0.2229, "step": 2282 
}, { "epoch": 12.828169014084507, "grad_norm": 0.9036170840263367, "learning_rate": 6.840000000000001e-06, "loss": 0.1993, "step": 2283 }, { "epoch": 12.83380281690141, "grad_norm": 1.0504740476608276, "learning_rate": 6.843e-06, "loss": 0.2291, "step": 2284 }, { "epoch": 12.83943661971831, "grad_norm": 0.8701003193855286, "learning_rate": 6.845999999999999e-06, "loss": 0.1739, "step": 2285 }, { "epoch": 12.845070422535212, "grad_norm": 1.1530624628067017, "learning_rate": 6.849e-06, "loss": 0.1559, "step": 2286 }, { "epoch": 12.850704225352112, "grad_norm": 1.3934451341629028, "learning_rate": 6.852e-06, "loss": 0.3023, "step": 2287 }, { "epoch": 12.856338028169015, "grad_norm": 0.9556367993354797, "learning_rate": 6.8550000000000004e-06, "loss": 0.1388, "step": 2288 }, { "epoch": 12.861971830985915, "grad_norm": 0.8076000213623047, "learning_rate": 6.858e-06, "loss": 0.1537, "step": 2289 }, { "epoch": 12.867605633802818, "grad_norm": 0.920360267162323, "learning_rate": 6.861e-06, "loss": 0.1507, "step": 2290 }, { "epoch": 12.873239436619718, "grad_norm": 0.965334951877594, "learning_rate": 6.864000000000001e-06, "loss": 0.1774, "step": 2291 }, { "epoch": 12.87887323943662, "grad_norm": 0.9386708736419678, "learning_rate": 6.867e-06, "loss": 0.1657, "step": 2292 }, { "epoch": 12.88450704225352, "grad_norm": 1.5225797891616821, "learning_rate": 6.87e-06, "loss": 0.1549, "step": 2293 }, { "epoch": 12.890140845070423, "grad_norm": 0.9704172015190125, "learning_rate": 6.873e-06, "loss": 0.1347, "step": 2294 }, { "epoch": 12.895774647887324, "grad_norm": 1.090592861175537, "learning_rate": 6.876e-06, "loss": 0.2073, "step": 2295 }, { "epoch": 12.901408450704226, "grad_norm": 1.1433273553848267, "learning_rate": 6.8790000000000005e-06, "loss": 0.1223, "step": 2296 }, { "epoch": 12.907042253521126, "grad_norm": 0.8738981485366821, "learning_rate": 6.882e-06, "loss": 0.0948, "step": 2297 }, { "epoch": 12.912676056338029, "grad_norm": 1.7056903839111328, "learning_rate": 
6.885e-06, "loss": 0.2145, "step": 2298 }, { "epoch": 12.91830985915493, "grad_norm": 3.190993309020996, "learning_rate": 6.888e-06, "loss": 0.1325, "step": 2299 }, { "epoch": 12.923943661971832, "grad_norm": 0.9311177134513855, "learning_rate": 6.891e-06, "loss": 0.1496, "step": 2300 }, { "epoch": 12.929577464788732, "grad_norm": 1.0775495767593384, "learning_rate": 6.894e-06, "loss": 0.1728, "step": 2301 }, { "epoch": 12.935211267605634, "grad_norm": 1.3665276765823364, "learning_rate": 6.897e-06, "loss": 0.1043, "step": 2302 }, { "epoch": 12.940845070422535, "grad_norm": 0.9709135293960571, "learning_rate": 6.900000000000001e-06, "loss": 0.167, "step": 2303 }, { "epoch": 12.946478873239437, "grad_norm": 0.8725993037223816, "learning_rate": 6.903e-06, "loss": 0.157, "step": 2304 }, { "epoch": 12.952112676056338, "grad_norm": 1.155639886856079, "learning_rate": 6.906e-06, "loss": 0.2045, "step": 2305 }, { "epoch": 12.95774647887324, "grad_norm": 1.1438989639282227, "learning_rate": 6.909e-06, "loss": 0.1159, "step": 2306 }, { "epoch": 12.96338028169014, "grad_norm": 1.1680606603622437, "learning_rate": 6.912e-06, "loss": 0.1317, "step": 2307 }, { "epoch": 12.969014084507043, "grad_norm": 1.15730881690979, "learning_rate": 6.915000000000001e-06, "loss": 0.1401, "step": 2308 }, { "epoch": 12.974647887323943, "grad_norm": 1.3509944677352905, "learning_rate": 6.9180000000000005e-06, "loss": 0.0954, "step": 2309 }, { "epoch": 12.980281690140846, "grad_norm": 1.4634404182434082, "learning_rate": 6.9209999999999995e-06, "loss": 0.1472, "step": 2310 }, { "epoch": 12.985915492957746, "grad_norm": 0.9491389393806458, "learning_rate": 6.924e-06, "loss": 0.0851, "step": 2311 }, { "epoch": 12.991549295774648, "grad_norm": 2.9997661113739014, "learning_rate": 6.927e-06, "loss": 0.1188, "step": 2312 }, { "epoch": 12.997183098591549, "grad_norm": 2.934652328491211, "learning_rate": 6.9300000000000006e-06, "loss": 0.2274, "step": 2313 }, { "epoch": 13.0, "grad_norm": 
1.052297592163086, "learning_rate": 6.933e-06, "loss": 0.0465, "step": 2314 }, { "epoch": 13.005633802816902, "grad_norm": 1.7352118492126465, "learning_rate": 6.936e-06, "loss": 0.3561, "step": 2315 }, { "epoch": 13.011267605633803, "grad_norm": 1.3757953643798828, "learning_rate": 6.939e-06, "loss": 0.3396, "step": 2316 }, { "epoch": 13.016901408450705, "grad_norm": 1.3444186449050903, "learning_rate": 6.942e-06, "loss": 0.2892, "step": 2317 }, { "epoch": 13.022535211267606, "grad_norm": 1.123867154121399, "learning_rate": 6.945e-06, "loss": 0.3195, "step": 2318 }, { "epoch": 13.028169014084508, "grad_norm": 0.7584018707275391, "learning_rate": 6.948e-06, "loss": 0.2339, "step": 2319 }, { "epoch": 13.033802816901408, "grad_norm": 0.9216213822364807, "learning_rate": 6.951e-06, "loss": 0.26, "step": 2320 }, { "epoch": 13.03943661971831, "grad_norm": 1.055661678314209, "learning_rate": 6.954000000000001e-06, "loss": 0.2715, "step": 2321 }, { "epoch": 13.045070422535211, "grad_norm": 0.9697660207748413, "learning_rate": 6.957e-06, "loss": 0.3053, "step": 2322 }, { "epoch": 13.050704225352113, "grad_norm": 1.0436304807662964, "learning_rate": 6.96e-06, "loss": 0.2524, "step": 2323 }, { "epoch": 13.056338028169014, "grad_norm": 0.7810503244400024, "learning_rate": 6.963e-06, "loss": 0.19, "step": 2324 }, { "epoch": 13.061971830985916, "grad_norm": 0.9463015198707581, "learning_rate": 6.966e-06, "loss": 0.2051, "step": 2325 }, { "epoch": 13.067605633802817, "grad_norm": 1.0816895961761475, "learning_rate": 6.9690000000000005e-06, "loss": 0.2061, "step": 2326 }, { "epoch": 13.073239436619719, "grad_norm": 1.8795841932296753, "learning_rate": 6.972e-06, "loss": 0.2014, "step": 2327 }, { "epoch": 13.07887323943662, "grad_norm": 1.0947595834732056, "learning_rate": 6.975e-06, "loss": 0.1782, "step": 2328 }, { "epoch": 13.084507042253522, "grad_norm": 0.712000846862793, "learning_rate": 6.978e-06, "loss": 0.1781, "step": 2329 }, { "epoch": 13.090140845070422, "grad_norm": 
0.742993950843811, "learning_rate": 6.981e-06, "loss": 0.1682, "step": 2330 }, { "epoch": 13.095774647887325, "grad_norm": 1.088239312171936, "learning_rate": 6.984e-06, "loss": 0.1556, "step": 2331 }, { "epoch": 13.101408450704225, "grad_norm": 1.0299497842788696, "learning_rate": 6.987e-06, "loss": 0.2097, "step": 2332 }, { "epoch": 13.107042253521128, "grad_norm": 0.915474534034729, "learning_rate": 6.990000000000001e-06, "loss": 0.1498, "step": 2333 }, { "epoch": 13.112676056338028, "grad_norm": 2.8872592449188232, "learning_rate": 6.993000000000001e-06, "loss": 0.2014, "step": 2334 }, { "epoch": 13.11830985915493, "grad_norm": 0.9793112874031067, "learning_rate": 6.996e-06, "loss": 0.1671, "step": 2335 }, { "epoch": 13.12394366197183, "grad_norm": 0.9295567274093628, "learning_rate": 6.999e-06, "loss": 0.1572, "step": 2336 }, { "epoch": 13.129577464788733, "grad_norm": 0.9504292607307434, "learning_rate": 7.002e-06, "loss": 0.1665, "step": 2337 }, { "epoch": 13.135211267605634, "grad_norm": 1.407444715499878, "learning_rate": 7.005000000000001e-06, "loss": 0.1137, "step": 2338 }, { "epoch": 13.140845070422536, "grad_norm": 1.7637865543365479, "learning_rate": 7.0080000000000005e-06, "loss": 0.1355, "step": 2339 }, { "epoch": 13.146478873239436, "grad_norm": 1.1098374128341675, "learning_rate": 7.011e-06, "loss": 0.1441, "step": 2340 }, { "epoch": 13.152112676056339, "grad_norm": 1.093401312828064, "learning_rate": 7.014e-06, "loss": 0.1679, "step": 2341 }, { "epoch": 13.15774647887324, "grad_norm": 0.9409353733062744, "learning_rate": 7.017e-06, "loss": 0.1133, "step": 2342 }, { "epoch": 13.163380281690142, "grad_norm": 1.076093077659607, "learning_rate": 7.0200000000000006e-06, "loss": 0.1202, "step": 2343 }, { "epoch": 13.169014084507042, "grad_norm": 0.7920619249343872, "learning_rate": 7.023e-06, "loss": 0.1093, "step": 2344 }, { "epoch": 13.174647887323944, "grad_norm": 0.7880189418792725, "learning_rate": 7.026e-06, "loss": 0.1192, "step": 2345 }, { 
"epoch": 13.180281690140845, "grad_norm": 1.24980890750885, "learning_rate": 7.029000000000001e-06, "loss": 0.1117, "step": 2346 }, { "epoch": 13.185915492957747, "grad_norm": 1.5097217559814453, "learning_rate": 7.032e-06, "loss": 0.104, "step": 2347 }, { "epoch": 13.191549295774648, "grad_norm": 1.7135924100875854, "learning_rate": 7.0349999999999996e-06, "loss": 0.0946, "step": 2348 }, { "epoch": 13.19718309859155, "grad_norm": 1.1639138460159302, "learning_rate": 7.038e-06, "loss": 0.1768, "step": 2349 }, { "epoch": 13.20281690140845, "grad_norm": 0.8357768654823303, "learning_rate": 7.041e-06, "loss": 0.0901, "step": 2350 }, { "epoch": 13.208450704225353, "grad_norm": 0.8521570563316345, "learning_rate": 7.044000000000001e-06, "loss": 0.1242, "step": 2351 }, { "epoch": 13.214084507042253, "grad_norm": 0.8252862095832825, "learning_rate": 7.0470000000000005e-06, "loss": 0.0702, "step": 2352 }, { "epoch": 13.219718309859156, "grad_norm": 2.0109314918518066, "learning_rate": 7.049999999999999e-06, "loss": 0.1213, "step": 2353 }, { "epoch": 13.225352112676056, "grad_norm": 1.2784870862960815, "learning_rate": 7.053e-06, "loss": 0.1762, "step": 2354 }, { "epoch": 13.230985915492958, "grad_norm": 1.216871976852417, "learning_rate": 7.056e-06, "loss": 0.0994, "step": 2355 }, { "epoch": 13.236619718309859, "grad_norm": 0.92425137758255, "learning_rate": 7.0590000000000005e-06, "loss": 0.1112, "step": 2356 }, { "epoch": 13.242253521126761, "grad_norm": 1.0674314498901367, "learning_rate": 7.062e-06, "loss": 0.0973, "step": 2357 }, { "epoch": 13.247887323943662, "grad_norm": 1.4772449731826782, "learning_rate": 7.065e-06, "loss": 0.1261, "step": 2358 }, { "epoch": 13.253521126760564, "grad_norm": 1.5397851467132568, "learning_rate": 7.068e-06, "loss": 0.3225, "step": 2359 }, { "epoch": 13.259154929577464, "grad_norm": 1.3334882259368896, "learning_rate": 7.071e-06, "loss": 0.2835, "step": 2360 }, { "epoch": 13.264788732394367, "grad_norm": 1.0320146083831787, 
"learning_rate": 7.074e-06, "loss": 0.3259, "step": 2361 }, { "epoch": 13.270422535211267, "grad_norm": 1.151577353477478, "learning_rate": 7.077e-06, "loss": 0.2385, "step": 2362 }, { "epoch": 13.27605633802817, "grad_norm": 1.236299991607666, "learning_rate": 7.08e-06, "loss": 0.2213, "step": 2363 }, { "epoch": 13.28169014084507, "grad_norm": 1.3038198947906494, "learning_rate": 7.083000000000001e-06, "loss": 0.2396, "step": 2364 }, { "epoch": 13.287323943661972, "grad_norm": 0.9464594125747681, "learning_rate": 7.086e-06, "loss": 0.2139, "step": 2365 }, { "epoch": 13.292957746478873, "grad_norm": 1.423374056816101, "learning_rate": 7.089e-06, "loss": 0.3063, "step": 2366 }, { "epoch": 13.298591549295775, "grad_norm": 1.1278823614120483, "learning_rate": 7.092e-06, "loss": 0.1902, "step": 2367 }, { "epoch": 13.304225352112676, "grad_norm": 1.1864081621170044, "learning_rate": 7.095e-06, "loss": 0.2942, "step": 2368 }, { "epoch": 13.309859154929578, "grad_norm": 0.959836483001709, "learning_rate": 7.0980000000000005e-06, "loss": 0.215, "step": 2369 }, { "epoch": 13.315492957746478, "grad_norm": 1.3126745223999023, "learning_rate": 7.101e-06, "loss": 0.2359, "step": 2370 }, { "epoch": 13.32112676056338, "grad_norm": 1.346785068511963, "learning_rate": 7.104e-06, "loss": 0.3455, "step": 2371 }, { "epoch": 13.326760563380281, "grad_norm": 0.9002761840820312, "learning_rate": 7.107e-06, "loss": 0.1494, "step": 2372 }, { "epoch": 13.332394366197184, "grad_norm": 0.9469854235649109, "learning_rate": 7.11e-06, "loss": 0.236, "step": 2373 }, { "epoch": 13.338028169014084, "grad_norm": 1.8179179430007935, "learning_rate": 7.113e-06, "loss": 0.23, "step": 2374 }, { "epoch": 13.343661971830986, "grad_norm": 1.1077646017074585, "learning_rate": 7.116e-06, "loss": 0.1699, "step": 2375 }, { "epoch": 13.349295774647887, "grad_norm": 1.1191051006317139, "learning_rate": 7.119000000000001e-06, "loss": 0.2161, "step": 2376 }, { "epoch": 13.35492957746479, "grad_norm": 
0.9753214716911316, "learning_rate": 7.122000000000001e-06, "loss": 0.2097, "step": 2377 }, { "epoch": 13.36056338028169, "grad_norm": 0.7419871687889099, "learning_rate": 7.1249999999999995e-06, "loss": 0.1117, "step": 2378 }, { "epoch": 13.366197183098592, "grad_norm": 2.1409976482391357, "learning_rate": 7.128e-06, "loss": 0.1742, "step": 2379 }, { "epoch": 13.371830985915492, "grad_norm": 0.957535982131958, "learning_rate": 7.131e-06, "loss": 0.1771, "step": 2380 }, { "epoch": 13.377464788732395, "grad_norm": 1.2483243942260742, "learning_rate": 7.134000000000001e-06, "loss": 0.1592, "step": 2381 }, { "epoch": 13.383098591549295, "grad_norm": 0.9406074285507202, "learning_rate": 7.1370000000000004e-06, "loss": 0.1423, "step": 2382 }, { "epoch": 13.388732394366198, "grad_norm": 1.0298913717269897, "learning_rate": 7.14e-06, "loss": 0.1315, "step": 2383 }, { "epoch": 13.394366197183098, "grad_norm": 1.1309726238250732, "learning_rate": 7.143e-06, "loss": 0.1613, "step": 2384 }, { "epoch": 13.4, "grad_norm": 1.1114422082901, "learning_rate": 7.146e-06, "loss": 0.1519, "step": 2385 }, { "epoch": 13.4056338028169, "grad_norm": 1.1689764261245728, "learning_rate": 7.1490000000000005e-06, "loss": 0.1133, "step": 2386 }, { "epoch": 13.411267605633803, "grad_norm": 2.5627989768981934, "learning_rate": 7.152e-06, "loss": 0.0928, "step": 2387 }, { "epoch": 13.416901408450704, "grad_norm": 0.8929197788238525, "learning_rate": 7.155e-06, "loss": 0.1425, "step": 2388 }, { "epoch": 13.422535211267606, "grad_norm": 0.8751282691955566, "learning_rate": 7.158000000000001e-06, "loss": 0.1363, "step": 2389 }, { "epoch": 13.428169014084506, "grad_norm": 0.7572558522224426, "learning_rate": 7.161e-06, "loss": 0.0937, "step": 2390 }, { "epoch": 13.433802816901409, "grad_norm": 1.5400004386901855, "learning_rate": 7.164e-06, "loss": 0.1303, "step": 2391 }, { "epoch": 13.43943661971831, "grad_norm": 0.9770224690437317, "learning_rate": 7.167e-06, "loss": 0.0971, "step": 2392 }, { 
"epoch": 13.445070422535212, "grad_norm": 0.830382764339447, "learning_rate": 7.17e-06, "loss": 0.1038, "step": 2393 }, { "epoch": 13.450704225352112, "grad_norm": 1.1589969396591187, "learning_rate": 7.173000000000001e-06, "loss": 0.1429, "step": 2394 }, { "epoch": 13.456338028169014, "grad_norm": 2.110220193862915, "learning_rate": 7.176e-06, "loss": 0.1642, "step": 2395 }, { "epoch": 13.461971830985915, "grad_norm": 0.9293519854545593, "learning_rate": 7.179e-06, "loss": 0.1378, "step": 2396 }, { "epoch": 13.467605633802817, "grad_norm": 1.06087327003479, "learning_rate": 7.182e-06, "loss": 0.1519, "step": 2397 }, { "epoch": 13.473239436619718, "grad_norm": 1.054998755455017, "learning_rate": 7.185e-06, "loss": 0.1232, "step": 2398 }, { "epoch": 13.47887323943662, "grad_norm": 1.1757842302322388, "learning_rate": 7.1880000000000005e-06, "loss": 0.1028, "step": 2399 }, { "epoch": 13.48450704225352, "grad_norm": 2.1529064178466797, "learning_rate": 7.191e-06, "loss": 0.1138, "step": 2400 }, { "epoch": 13.490140845070423, "grad_norm": 1.410528302192688, "learning_rate": 7.194000000000001e-06, "loss": 0.1267, "step": 2401 }, { "epoch": 13.495774647887323, "grad_norm": 3.1351592540740967, "learning_rate": 7.197e-06, "loss": 0.0716, "step": 2402 }, { "epoch": 13.501408450704226, "grad_norm": 1.6554334163665771, "learning_rate": 7.2e-06, "loss": 0.2966, "step": 2403 }, { "epoch": 13.507042253521126, "grad_norm": 1.5018271207809448, "learning_rate": 7.203e-06, "loss": 0.2896, "step": 2404 }, { "epoch": 13.512676056338028, "grad_norm": 1.1894396543502808, "learning_rate": 7.206e-06, "loss": 0.2528, "step": 2405 }, { "epoch": 13.518309859154929, "grad_norm": 0.9320605397224426, "learning_rate": 7.209000000000001e-06, "loss": 0.3096, "step": 2406 }, { "epoch": 13.523943661971831, "grad_norm": 1.1828113794326782, "learning_rate": 7.2120000000000006e-06, "loss": 0.2843, "step": 2407 }, { "epoch": 13.529577464788732, "grad_norm": 1.1249010562896729, "learning_rate": 
7.2149999999999995e-06, "loss": 0.275, "step": 2408 }, { "epoch": 13.535211267605634, "grad_norm": 1.0248589515686035, "learning_rate": 7.218e-06, "loss": 0.2709, "step": 2409 }, { "epoch": 13.540845070422534, "grad_norm": 0.8823660016059875, "learning_rate": 7.221e-06, "loss": 0.2279, "step": 2410 }, { "epoch": 13.546478873239437, "grad_norm": 1.0760838985443115, "learning_rate": 7.224e-06, "loss": 0.2316, "step": 2411 }, { "epoch": 13.552112676056337, "grad_norm": 1.0274428129196167, "learning_rate": 7.2270000000000004e-06, "loss": 0.2304, "step": 2412 }, { "epoch": 13.55774647887324, "grad_norm": 0.8955510258674622, "learning_rate": 7.23e-06, "loss": 0.2048, "step": 2413 }, { "epoch": 13.56338028169014, "grad_norm": 0.769860029220581, "learning_rate": 7.233e-06, "loss": 0.1916, "step": 2414 }, { "epoch": 13.569014084507042, "grad_norm": 0.7990733981132507, "learning_rate": 7.236e-06, "loss": 0.1745, "step": 2415 }, { "epoch": 13.574647887323943, "grad_norm": 0.96040278673172, "learning_rate": 7.239e-06, "loss": 0.1421, "step": 2416 }, { "epoch": 13.580281690140845, "grad_norm": 1.3098976612091064, "learning_rate": 7.242e-06, "loss": 0.2189, "step": 2417 }, { "epoch": 13.585915492957746, "grad_norm": 1.1566462516784668, "learning_rate": 7.245e-06, "loss": 0.164, "step": 2418 }, { "epoch": 13.591549295774648, "grad_norm": 1.013031244277954, "learning_rate": 7.248000000000001e-06, "loss": 0.1822, "step": 2419 }, { "epoch": 13.597183098591549, "grad_norm": 0.890204906463623, "learning_rate": 7.2510000000000005e-06, "loss": 0.2045, "step": 2420 }, { "epoch": 13.60281690140845, "grad_norm": 0.9895558953285217, "learning_rate": 7.2539999999999995e-06, "loss": 0.1282, "step": 2421 }, { "epoch": 13.608450704225351, "grad_norm": 1.0559557676315308, "learning_rate": 7.257e-06, "loss": 0.1486, "step": 2422 }, { "epoch": 13.614084507042254, "grad_norm": 1.1751247644424438, "learning_rate": 7.26e-06, "loss": 0.1161, "step": 2423 }, { "epoch": 13.619718309859154, "grad_norm": 
0.704023003578186, "learning_rate": 7.263000000000001e-06, "loss": 0.1099, "step": 2424 }, { "epoch": 13.625352112676056, "grad_norm": 1.1705760955810547, "learning_rate": 7.266e-06, "loss": 0.1991, "step": 2425 }, { "epoch": 13.630985915492957, "grad_norm": 0.7136492133140564, "learning_rate": 7.269e-06, "loss": 0.1298, "step": 2426 }, { "epoch": 13.63661971830986, "grad_norm": 0.9863913059234619, "learning_rate": 7.272e-06, "loss": 0.1197, "step": 2427 }, { "epoch": 13.642253521126761, "grad_norm": 1.096895456314087, "learning_rate": 7.275e-06, "loss": 0.1326, "step": 2428 }, { "epoch": 13.647887323943662, "grad_norm": 0.84125816822052, "learning_rate": 7.2780000000000005e-06, "loss": 0.1485, "step": 2429 }, { "epoch": 13.653521126760563, "grad_norm": 0.8120884895324707, "learning_rate": 7.281e-06, "loss": 0.1059, "step": 2430 }, { "epoch": 13.659154929577465, "grad_norm": 1.302880883216858, "learning_rate": 7.284e-06, "loss": 0.1511, "step": 2431 }, { "epoch": 13.664788732394367, "grad_norm": 1.3012844324111938, "learning_rate": 7.287000000000001e-06, "loss": 0.1574, "step": 2432 }, { "epoch": 13.670422535211268, "grad_norm": 0.8365102410316467, "learning_rate": 7.29e-06, "loss": 0.0961, "step": 2433 }, { "epoch": 13.676056338028168, "grad_norm": 1.2194106578826904, "learning_rate": 7.293e-06, "loss": 0.2007, "step": 2434 }, { "epoch": 13.68169014084507, "grad_norm": 0.8804576396942139, "learning_rate": 7.296e-06, "loss": 0.1322, "step": 2435 }, { "epoch": 13.687323943661973, "grad_norm": 1.0952662229537964, "learning_rate": 7.299e-06, "loss": 0.1587, "step": 2436 }, { "epoch": 13.692957746478873, "grad_norm": 0.9609819054603577, "learning_rate": 7.3020000000000006e-06, "loss": 0.0889, "step": 2437 }, { "epoch": 13.698591549295774, "grad_norm": 0.7399151921272278, "learning_rate": 7.305e-06, "loss": 0.0987, "step": 2438 }, { "epoch": 13.704225352112676, "grad_norm": 1.103484869003296, "learning_rate": 7.308e-06, "loss": 0.1453, "step": 2439 }, { "epoch": 
13.709859154929578, "grad_norm": 0.9307721257209778, "learning_rate": 7.311e-06, "loss": 0.1047, "step": 2440 }, { "epoch": 13.715492957746479, "grad_norm": 1.0198315382003784, "learning_rate": 7.314e-06, "loss": 0.1349, "step": 2441 }, { "epoch": 13.721126760563381, "grad_norm": 1.02377450466156, "learning_rate": 7.317e-06, "loss": 0.1084, "step": 2442 }, { "epoch": 13.726760563380282, "grad_norm": 1.2538667917251587, "learning_rate": 7.32e-06, "loss": 0.0751, "step": 2443 }, { "epoch": 13.732394366197184, "grad_norm": 0.9317352175712585, "learning_rate": 7.323000000000001e-06, "loss": 0.1257, "step": 2444 }, { "epoch": 13.738028169014084, "grad_norm": 1.0897257328033447, "learning_rate": 7.326e-06, "loss": 0.1059, "step": 2445 }, { "epoch": 13.743661971830987, "grad_norm": 3.0194356441497803, "learning_rate": 7.329e-06, "loss": 0.1091, "step": 2446 }, { "epoch": 13.749295774647887, "grad_norm": 1.5205198526382446, "learning_rate": 7.332e-06, "loss": 0.355, "step": 2447 }, { "epoch": 13.75492957746479, "grad_norm": 1.1576690673828125, "learning_rate": 7.335e-06, "loss": 0.3303, "step": 2448 }, { "epoch": 13.76056338028169, "grad_norm": 1.1300952434539795, "learning_rate": 7.338000000000001e-06, "loss": 0.3508, "step": 2449 }, { "epoch": 13.766197183098592, "grad_norm": 0.8204237222671509, "learning_rate": 7.3410000000000005e-06, "loss": 0.2498, "step": 2450 }, { "epoch": 13.771830985915493, "grad_norm": 0.937884509563446, "learning_rate": 7.3439999999999995e-06, "loss": 0.2318, "step": 2451 }, { "epoch": 13.777464788732395, "grad_norm": 1.1115881204605103, "learning_rate": 7.347e-06, "loss": 0.2803, "step": 2452 }, { "epoch": 13.783098591549296, "grad_norm": 1.1948392391204834, "learning_rate": 7.35e-06, "loss": 0.2318, "step": 2453 }, { "epoch": 13.788732394366198, "grad_norm": 0.9145007729530334, "learning_rate": 7.353000000000001e-06, "loss": 0.2358, "step": 2454 }, { "epoch": 13.794366197183098, "grad_norm": 0.9771397709846497, "learning_rate": 7.356e-06, 
"loss": 0.2095, "step": 2455 }, { "epoch": 13.8, "grad_norm": 1.1815807819366455, "learning_rate": 7.359e-06, "loss": 0.1784, "step": 2456 }, { "epoch": 13.805633802816901, "grad_norm": 0.8465495109558105, "learning_rate": 7.362e-06, "loss": 0.2054, "step": 2457 }, { "epoch": 13.811267605633804, "grad_norm": 1.071962594985962, "learning_rate": 7.365e-06, "loss": 0.2149, "step": 2458 }, { "epoch": 13.816901408450704, "grad_norm": 0.8791579008102417, "learning_rate": 7.3680000000000004e-06, "loss": 0.1789, "step": 2459 }, { "epoch": 13.822535211267606, "grad_norm": 1.2353765964508057, "learning_rate": 7.371e-06, "loss": 0.2691, "step": 2460 }, { "epoch": 13.828169014084507, "grad_norm": 1.3512202501296997, "learning_rate": 7.374e-06, "loss": 0.1502, "step": 2461 }, { "epoch": 13.83380281690141, "grad_norm": 1.0855315923690796, "learning_rate": 7.377000000000001e-06, "loss": 0.1958, "step": 2462 }, { "epoch": 13.83943661971831, "grad_norm": 0.8112655878067017, "learning_rate": 7.3800000000000005e-06, "loss": 0.1555, "step": 2463 }, { "epoch": 13.845070422535212, "grad_norm": 0.8505599498748779, "learning_rate": 7.383e-06, "loss": 0.1539, "step": 2464 }, { "epoch": 13.850704225352112, "grad_norm": 0.8195877075195312, "learning_rate": 7.386e-06, "loss": 0.1333, "step": 2465 }, { "epoch": 13.856338028169015, "grad_norm": 1.2472988367080688, "learning_rate": 7.389e-06, "loss": 0.1502, "step": 2466 }, { "epoch": 13.861971830985915, "grad_norm": 1.132716178894043, "learning_rate": 7.3920000000000005e-06, "loss": 0.1895, "step": 2467 }, { "epoch": 13.867605633802818, "grad_norm": 1.5551981925964355, "learning_rate": 7.395e-06, "loss": 0.2396, "step": 2468 }, { "epoch": 13.873239436619718, "grad_norm": 1.064211368560791, "learning_rate": 7.398000000000001e-06, "loss": 0.1619, "step": 2469 }, { "epoch": 13.87887323943662, "grad_norm": 1.1658692359924316, "learning_rate": 7.401e-06, "loss": 0.1768, "step": 2470 }, { "epoch": 13.88450704225352, "grad_norm": 0.7762494087219238, 
"learning_rate": 7.404e-06, "loss": 0.131, "step": 2471 }, { "epoch": 13.890140845070423, "grad_norm": 0.9156432747840881, "learning_rate": 7.407e-06, "loss": 0.1431, "step": 2472 }, { "epoch": 13.895774647887324, "grad_norm": 0.984115719795227, "learning_rate": 7.41e-06, "loss": 0.194, "step": 2473 }, { "epoch": 13.901408450704226, "grad_norm": 1.1039249897003174, "learning_rate": 7.413e-06, "loss": 0.1054, "step": 2474 }, { "epoch": 13.907042253521126, "grad_norm": 0.5961586833000183, "learning_rate": 7.416000000000001e-06, "loss": 0.1241, "step": 2475 }, { "epoch": 13.912676056338029, "grad_norm": 0.8967409729957581, "learning_rate": 7.419e-06, "loss": 0.2183, "step": 2476 }, { "epoch": 13.91830985915493, "grad_norm": 0.9441268444061279, "learning_rate": 7.422e-06, "loss": 0.1109, "step": 2477 }, { "epoch": 13.923943661971832, "grad_norm": 0.7546868324279785, "learning_rate": 7.425e-06, "loss": 0.1085, "step": 2478 }, { "epoch": 13.929577464788732, "grad_norm": 1.3075083494186401, "learning_rate": 7.428e-06, "loss": 0.2256, "step": 2479 }, { "epoch": 13.935211267605634, "grad_norm": 1.0688899755477905, "learning_rate": 7.4310000000000005e-06, "loss": 0.1394, "step": 2480 }, { "epoch": 13.940845070422535, "grad_norm": 1.0703538656234741, "learning_rate": 7.434e-06, "loss": 0.1291, "step": 2481 }, { "epoch": 13.946478873239437, "grad_norm": 0.9404478073120117, "learning_rate": 7.437e-06, "loss": 0.1593, "step": 2482 }, { "epoch": 13.952112676056338, "grad_norm": 0.860598087310791, "learning_rate": 7.44e-06, "loss": 0.1114, "step": 2483 }, { "epoch": 13.95774647887324, "grad_norm": 1.135445237159729, "learning_rate": 7.443e-06, "loss": 0.1017, "step": 2484 }, { "epoch": 13.96338028169014, "grad_norm": 0.7853214144706726, "learning_rate": 7.446e-06, "loss": 0.0899, "step": 2485 }, { "epoch": 13.969014084507043, "grad_norm": 0.9157571792602539, "learning_rate": 7.449e-06, "loss": 0.1161, "step": 2486 }, { "epoch": 13.974647887323943, "grad_norm": 1.0105891227722168, 
"learning_rate": 7.452000000000001e-06, "loss": 0.0972, "step": 2487 }, { "epoch": 13.980281690140846, "grad_norm": 0.8211488127708435, "learning_rate": 7.455e-06, "loss": 0.0838, "step": 2488 }, { "epoch": 13.985915492957746, "grad_norm": 0.977110743522644, "learning_rate": 7.4579999999999996e-06, "loss": 0.1432, "step": 2489 }, { "epoch": 13.991549295774648, "grad_norm": 1.1565195322036743, "learning_rate": 7.461e-06, "loss": 0.1031, "step": 2490 }, { "epoch": 13.997183098591549, "grad_norm": 0.9908108115196228, "learning_rate": 7.464e-06, "loss": 0.3086, "step": 2491 }, { "epoch": 14.0, "grad_norm": 1.9380509853363037, "learning_rate": 7.467000000000001e-06, "loss": 0.1312, "step": 2492 }, { "epoch": 14.005633802816902, "grad_norm": 1.4375203847885132, "learning_rate": 7.4700000000000005e-06, "loss": 0.3191, "step": 2493 }, { "epoch": 14.011267605633803, "grad_norm": 1.2518107891082764, "learning_rate": 7.4729999999999994e-06, "loss": 0.2648, "step": 2494 }, { "epoch": 14.016901408450705, "grad_norm": 0.9817587733268738, "learning_rate": 7.476e-06, "loss": 0.2953, "step": 2495 }, { "epoch": 14.022535211267606, "grad_norm": 1.0936018228530884, "learning_rate": 7.479e-06, "loss": 0.2525, "step": 2496 }, { "epoch": 14.028169014084508, "grad_norm": 0.8114771842956543, "learning_rate": 7.4820000000000005e-06, "loss": 0.2243, "step": 2497 }, { "epoch": 14.033802816901408, "grad_norm": 0.9761454463005066, "learning_rate": 7.485e-06, "loss": 0.2103, "step": 2498 }, { "epoch": 14.03943661971831, "grad_norm": 1.4058713912963867, "learning_rate": 7.488e-06, "loss": 0.2312, "step": 2499 }, { "epoch": 14.045070422535211, "grad_norm": 1.159949541091919, "learning_rate": 7.491e-06, "loss": 0.2511, "step": 2500 }, { "epoch": 14.050704225352113, "grad_norm": 0.9196081161499023, "learning_rate": 7.494e-06, "loss": 0.193, "step": 2501 }, { "epoch": 14.056338028169014, "grad_norm": 0.6740989089012146, "learning_rate": 7.497e-06, "loss": 0.1673, "step": 2502 }, { "epoch": 
14.061971830985916, "grad_norm": 1.036558747291565, "learning_rate": 7.5e-06, "loss": 0.1995, "step": 2503 }, { "epoch": 14.067605633802817, "grad_norm": 0.8210061192512512, "learning_rate": 7.503e-06, "loss": 0.1659, "step": 2504 }, { "epoch": 14.073239436619719, "grad_norm": 1.2209875583648682, "learning_rate": 7.506e-06, "loss": 0.2206, "step": 2505 }, { "epoch": 14.07887323943662, "grad_norm": 1.2579416036605835, "learning_rate": 7.5090000000000004e-06, "loss": 0.1895, "step": 2506 }, { "epoch": 14.084507042253522, "grad_norm": 1.0967655181884766, "learning_rate": 7.512e-06, "loss": 0.1337, "step": 2507 }, { "epoch": 14.090140845070422, "grad_norm": 1.1392700672149658, "learning_rate": 7.515e-06, "loss": 0.1727, "step": 2508 }, { "epoch": 14.095774647887325, "grad_norm": 0.8208845853805542, "learning_rate": 7.518e-06, "loss": 0.1707, "step": 2509 }, { "epoch": 14.101408450704225, "grad_norm": 1.1638633012771606, "learning_rate": 7.521e-06, "loss": 0.1502, "step": 2510 }, { "epoch": 14.107042253521128, "grad_norm": 1.1409746408462524, "learning_rate": 7.524000000000001e-06, "loss": 0.1913, "step": 2511 }, { "epoch": 14.112676056338028, "grad_norm": 1.1333116292953491, "learning_rate": 7.527000000000001e-06, "loss": 0.1588, "step": 2512 }, { "epoch": 14.11830985915493, "grad_norm": 0.9273078441619873, "learning_rate": 7.53e-06, "loss": 0.149, "step": 2513 }, { "epoch": 14.12394366197183, "grad_norm": 0.6535177826881409, "learning_rate": 7.533e-06, "loss": 0.0846, "step": 2514 }, { "epoch": 14.129577464788733, "grad_norm": 0.8918919563293457, "learning_rate": 7.5359999999999995e-06, "loss": 0.1439, "step": 2515 }, { "epoch": 14.135211267605634, "grad_norm": 0.7458071112632751, "learning_rate": 7.539000000000001e-06, "loss": 0.1053, "step": 2516 }, { "epoch": 14.140845070422536, "grad_norm": 0.7669033408164978, "learning_rate": 7.542000000000001e-06, "loss": 0.0939, "step": 2517 }, { "epoch": 14.146478873239436, "grad_norm": 1.0432018041610718, "learning_rate": 
7.545000000000001e-06, "loss": 0.1291, "step": 2518 }, { "epoch": 14.152112676056339, "grad_norm": 0.8472408056259155, "learning_rate": 7.5479999999999996e-06, "loss": 0.134, "step": 2519 }, { "epoch": 14.15774647887324, "grad_norm": 0.5946011543273926, "learning_rate": 7.550999999999999e-06, "loss": 0.0859, "step": 2520 }, { "epoch": 14.163380281690142, "grad_norm": 1.080089807510376, "learning_rate": 7.554000000000001e-06, "loss": 0.1475, "step": 2521 }, { "epoch": 14.169014084507042, "grad_norm": 1.661144733428955, "learning_rate": 7.557000000000001e-06, "loss": 0.1747, "step": 2522 }, { "epoch": 14.174647887323944, "grad_norm": 0.6961541771888733, "learning_rate": 7.5600000000000005e-06, "loss": 0.0807, "step": 2523 }, { "epoch": 14.180281690140845, "grad_norm": 0.7404202222824097, "learning_rate": 7.563e-06, "loss": 0.1252, "step": 2524 }, { "epoch": 14.185915492957747, "grad_norm": 0.7033008933067322, "learning_rate": 7.565999999999999e-06, "loss": 0.103, "step": 2525 }, { "epoch": 14.191549295774648, "grad_norm": 0.803184986114502, "learning_rate": 7.569000000000001e-06, "loss": 0.0735, "step": 2526 }, { "epoch": 14.19718309859155, "grad_norm": 1.143172025680542, "learning_rate": 7.5720000000000005e-06, "loss": 0.1117, "step": 2527 }, { "epoch": 14.20281690140845, "grad_norm": 2.290583848953247, "learning_rate": 7.575e-06, "loss": 0.1256, "step": 2528 }, { "epoch": 14.208450704225353, "grad_norm": 1.1133346557617188, "learning_rate": 7.578e-06, "loss": 0.1227, "step": 2529 }, { "epoch": 14.214084507042253, "grad_norm": 1.153148889541626, "learning_rate": 7.581e-06, "loss": 0.0986, "step": 2530 }, { "epoch": 14.219718309859156, "grad_norm": 2.8459057807922363, "learning_rate": 7.5840000000000006e-06, "loss": 0.1514, "step": 2531 }, { "epoch": 14.225352112676056, "grad_norm": 1.2485533952713013, "learning_rate": 7.587e-06, "loss": 0.0784, "step": 2532 }, { "epoch": 14.230985915492958, "grad_norm": 0.9979634881019592, "learning_rate": 7.59e-06, "loss": 0.0959, 
"step": 2533 }, { "epoch": 14.236619718309859, "grad_norm": 0.9614206552505493, "learning_rate": 7.593e-06, "loss": 0.1598, "step": 2534 }, { "epoch": 14.242253521126761, "grad_norm": 4.048535346984863, "learning_rate": 7.596e-06, "loss": 0.142, "step": 2535 }, { "epoch": 14.247887323943662, "grad_norm": 2.3054440021514893, "learning_rate": 7.599000000000001e-06, "loss": 0.0896, "step": 2536 }, { "epoch": 14.253521126760564, "grad_norm": 2.1023647785186768, "learning_rate": 7.602e-06, "loss": 0.3647, "step": 2537 }, { "epoch": 14.259154929577464, "grad_norm": 1.0547395944595337, "learning_rate": 7.605e-06, "loss": 0.3032, "step": 2538 }, { "epoch": 14.264788732394367, "grad_norm": 1.0112782716751099, "learning_rate": 7.608e-06, "loss": 0.2665, "step": 2539 }, { "epoch": 14.270422535211267, "grad_norm": 1.2834362983703613, "learning_rate": 7.611e-06, "loss": 0.3498, "step": 2540 }, { "epoch": 14.27605633802817, "grad_norm": 1.0717977285385132, "learning_rate": 7.614000000000001e-06, "loss": 0.2274, "step": 2541 }, { "epoch": 14.28169014084507, "grad_norm": 1.5067107677459717, "learning_rate": 7.617000000000001e-06, "loss": 0.3381, "step": 2542 }, { "epoch": 14.287323943661972, "grad_norm": 1.1613428592681885, "learning_rate": 7.62e-06, "loss": 0.2143, "step": 2543 }, { "epoch": 14.292957746478873, "grad_norm": 1.0167107582092285, "learning_rate": 7.623e-06, "loss": 0.2178, "step": 2544 }, { "epoch": 14.298591549295775, "grad_norm": 0.8218380808830261, "learning_rate": 7.6259999999999995e-06, "loss": 0.2454, "step": 2545 }, { "epoch": 14.304225352112676, "grad_norm": 1.2666577100753784, "learning_rate": 7.629000000000001e-06, "loss": 0.2071, "step": 2546 }, { "epoch": 14.309859154929578, "grad_norm": 1.2041953802108765, "learning_rate": 7.632e-06, "loss": 0.2333, "step": 2547 }, { "epoch": 14.315492957746478, "grad_norm": 0.9935725331306458, "learning_rate": 7.635e-06, "loss": 0.1676, "step": 2548 }, { "epoch": 14.32112676056338, "grad_norm": 0.8743584752082825, 
"learning_rate": 7.638e-06, "loss": 0.1831, "step": 2549 }, { "epoch": 14.326760563380281, "grad_norm": 1.6299316883087158, "learning_rate": 7.641e-06, "loss": 0.2584, "step": 2550 }, { "epoch": 14.332394366197184, "grad_norm": 1.0508118867874146, "learning_rate": 7.644000000000002e-06, "loss": 0.1723, "step": 2551 }, { "epoch": 14.338028169014084, "grad_norm": 0.8916674256324768, "learning_rate": 7.647000000000001e-06, "loss": 0.1731, "step": 2552 }, { "epoch": 14.343661971830986, "grad_norm": 0.8129504323005676, "learning_rate": 7.65e-06, "loss": 0.1089, "step": 2553 }, { "epoch": 14.349295774647887, "grad_norm": 0.882179856300354, "learning_rate": 7.653e-06, "loss": 0.1703, "step": 2554 }, { "epoch": 14.35492957746479, "grad_norm": 4.1243767738342285, "learning_rate": 7.656e-06, "loss": 0.1136, "step": 2555 }, { "epoch": 14.36056338028169, "grad_norm": 1.2801704406738281, "learning_rate": 7.659e-06, "loss": 0.1362, "step": 2556 }, { "epoch": 14.366197183098592, "grad_norm": 1.5867371559143066, "learning_rate": 7.662e-06, "loss": 0.1933, "step": 2557 }, { "epoch": 14.371830985915492, "grad_norm": 1.1041922569274902, "learning_rate": 7.665e-06, "loss": 0.1491, "step": 2558 }, { "epoch": 14.377464788732395, "grad_norm": 1.046356439590454, "learning_rate": 7.668e-06, "loss": 0.17, "step": 2559 }, { "epoch": 14.383098591549295, "grad_norm": 1.1510828733444214, "learning_rate": 7.671e-06, "loss": 0.1851, "step": 2560 }, { "epoch": 14.388732394366198, "grad_norm": 0.8338695764541626, "learning_rate": 7.674000000000001e-06, "loss": 0.112, "step": 2561 }, { "epoch": 14.394366197183098, "grad_norm": 1.2736207246780396, "learning_rate": 7.677000000000001e-06, "loss": 0.1602, "step": 2562 }, { "epoch": 14.4, "grad_norm": 1.4519656896591187, "learning_rate": 7.680000000000001e-06, "loss": 0.1949, "step": 2563 }, { "epoch": 14.4056338028169, "grad_norm": 0.9182896018028259, "learning_rate": 7.683e-06, "loss": 0.1253, "step": 2564 }, { "epoch": 14.411267605633803, "grad_norm": 
0.9237959980964661, "learning_rate": 7.685999999999999e-06, "loss": 0.0945, "step": 2565 }, { "epoch": 14.416901408450704, "grad_norm": 1.3990576267242432, "learning_rate": 7.688999999999999e-06, "loss": 0.1235, "step": 2566 }, { "epoch": 14.422535211267606, "grad_norm": 1.3699400424957275, "learning_rate": 7.692e-06, "loss": 0.1126, "step": 2567 }, { "epoch": 14.428169014084506, "grad_norm": 0.983512818813324, "learning_rate": 7.695e-06, "loss": 0.1039, "step": 2568 }, { "epoch": 14.433802816901409, "grad_norm": 0.9127490520477295, "learning_rate": 7.698e-06, "loss": 0.1621, "step": 2569 }, { "epoch": 14.43943661971831, "grad_norm": 0.8789691925048828, "learning_rate": 7.701e-06, "loss": 0.0884, "step": 2570 }, { "epoch": 14.445070422535212, "grad_norm": 0.7129899263381958, "learning_rate": 7.704e-06, "loss": 0.1142, "step": 2571 }, { "epoch": 14.450704225352112, "grad_norm": 3.573493480682373, "learning_rate": 7.707000000000001e-06, "loss": 0.137, "step": 2572 }, { "epoch": 14.456338028169014, "grad_norm": 1.0213673114776611, "learning_rate": 7.71e-06, "loss": 0.1105, "step": 2573 }, { "epoch": 14.461971830985915, "grad_norm": 0.6353466510772705, "learning_rate": 7.713e-06, "loss": 0.083, "step": 2574 }, { "epoch": 14.467605633802817, "grad_norm": 0.8871382474899292, "learning_rate": 7.716e-06, "loss": 0.1071, "step": 2575 }, { "epoch": 14.473239436619718, "grad_norm": 1.414605736732483, "learning_rate": 7.719e-06, "loss": 0.163, "step": 2576 }, { "epoch": 14.47887323943662, "grad_norm": 1.4842805862426758, "learning_rate": 7.722e-06, "loss": 0.1089, "step": 2577 }, { "epoch": 14.48450704225352, "grad_norm": 0.611676037311554, "learning_rate": 7.725e-06, "loss": 0.075, "step": 2578 }, { "epoch": 14.490140845070423, "grad_norm": 0.9833987951278687, "learning_rate": 7.728e-06, "loss": 0.1229, "step": 2579 }, { "epoch": 14.495774647887323, "grad_norm": 3.1326277256011963, "learning_rate": 7.731e-06, "loss": 0.143, "step": 2580 }, { "epoch": 14.501408450704226, 
"grad_norm": 1.6467655897140503, "learning_rate": 7.733999999999999e-06, "loss": 0.3065, "step": 2581 }, { "epoch": 14.507042253521126, "grad_norm": 1.1910048723220825, "learning_rate": 7.737e-06, "loss": 0.2918, "step": 2582 }, { "epoch": 14.512676056338028, "grad_norm": 1.0834463834762573, "learning_rate": 7.74e-06, "loss": 0.2463, "step": 2583 }, { "epoch": 14.518309859154929, "grad_norm": 1.0580854415893555, "learning_rate": 7.743e-06, "loss": 0.2385, "step": 2584 }, { "epoch": 14.523943661971831, "grad_norm": 1.390803337097168, "learning_rate": 7.746e-06, "loss": 0.2777, "step": 2585 }, { "epoch": 14.529577464788732, "grad_norm": 0.9166305661201477, "learning_rate": 7.749e-06, "loss": 0.2144, "step": 2586 }, { "epoch": 14.535211267605634, "grad_norm": 0.9835960865020752, "learning_rate": 7.752000000000001e-06, "loss": 0.2578, "step": 2587 }, { "epoch": 14.540845070422534, "grad_norm": 1.7300853729248047, "learning_rate": 7.755000000000001e-06, "loss": 0.2719, "step": 2588 }, { "epoch": 14.546478873239437, "grad_norm": 1.1082377433776855, "learning_rate": 7.758000000000001e-06, "loss": 0.179, "step": 2589 }, { "epoch": 14.552112676056337, "grad_norm": 0.8716225028038025, "learning_rate": 7.760999999999999e-06, "loss": 0.1862, "step": 2590 }, { "epoch": 14.55774647887324, "grad_norm": 1.3490822315216064, "learning_rate": 7.763999999999999e-06, "loss": 0.2206, "step": 2591 }, { "epoch": 14.56338028169014, "grad_norm": 2.049736499786377, "learning_rate": 7.767e-06, "loss": 0.196, "step": 2592 }, { "epoch": 14.569014084507042, "grad_norm": 1.1691184043884277, "learning_rate": 7.77e-06, "loss": 0.2288, "step": 2593 }, { "epoch": 14.574647887323943, "grad_norm": 0.7626386880874634, "learning_rate": 7.773e-06, "loss": 0.1468, "step": 2594 }, { "epoch": 14.580281690140845, "grad_norm": 1.3530088663101196, "learning_rate": 7.776e-06, "loss": 0.2295, "step": 2595 }, { "epoch": 14.585915492957746, "grad_norm": 0.8915318846702576, "learning_rate": 7.779e-06, "loss": 
0.1975, "step": 2596 }, { "epoch": 14.591549295774648, "grad_norm": 0.9858392477035522, "learning_rate": 7.782000000000001e-06, "loss": 0.1264, "step": 2597 }, { "epoch": 14.597183098591549, "grad_norm": 1.2323999404907227, "learning_rate": 7.785000000000001e-06, "loss": 0.1809, "step": 2598 }, { "epoch": 14.60281690140845, "grad_norm": 1.0960729122161865, "learning_rate": 7.788e-06, "loss": 0.1284, "step": 2599 }, { "epoch": 14.608450704225351, "grad_norm": 1.1761407852172852, "learning_rate": 7.791e-06, "loss": 0.1422, "step": 2600 }, { "epoch": 14.614084507042254, "grad_norm": 1.0340557098388672, "learning_rate": 7.794e-06, "loss": 0.1618, "step": 2601 }, { "epoch": 14.619718309859154, "grad_norm": 0.8146259188652039, "learning_rate": 7.797e-06, "loss": 0.159, "step": 2602 }, { "epoch": 14.625352112676056, "grad_norm": 0.9836282134056091, "learning_rate": 7.8e-06, "loss": 0.1664, "step": 2603 }, { "epoch": 14.630985915492957, "grad_norm": 1.1143687963485718, "learning_rate": 7.803e-06, "loss": 0.1429, "step": 2604 }, { "epoch": 14.63661971830986, "grad_norm": 0.8381540775299072, "learning_rate": 7.806e-06, "loss": 0.1742, "step": 2605 }, { "epoch": 14.642253521126761, "grad_norm": 0.854367196559906, "learning_rate": 7.809e-06, "loss": 0.1072, "step": 2606 }, { "epoch": 14.647887323943662, "grad_norm": 0.9884629845619202, "learning_rate": 7.812e-06, "loss": 0.1538, "step": 2607 }, { "epoch": 14.653521126760563, "grad_norm": 0.742863655090332, "learning_rate": 7.815e-06, "loss": 0.1107, "step": 2608 }, { "epoch": 14.659154929577465, "grad_norm": 1.1022722721099854, "learning_rate": 7.818e-06, "loss": 0.12, "step": 2609 }, { "epoch": 14.664788732394367, "grad_norm": 0.8258447051048279, "learning_rate": 7.821e-06, "loss": 0.1118, "step": 2610 }, { "epoch": 14.670422535211268, "grad_norm": 0.6493814587593079, "learning_rate": 7.824e-06, "loss": 0.0957, "step": 2611 }, { "epoch": 14.676056338028168, "grad_norm": 0.8995725512504578, "learning_rate": 
7.827000000000001e-06, "loss": 0.1751, "step": 2612 }, { "epoch": 14.68169014084507, "grad_norm": 0.741944432258606, "learning_rate": 7.830000000000001e-06, "loss": 0.1454, "step": 2613 }, { "epoch": 14.687323943661973, "grad_norm": 0.9924150705337524, "learning_rate": 7.833e-06, "loss": 0.1636, "step": 2614 }, { "epoch": 14.692957746478873, "grad_norm": 0.8410320281982422, "learning_rate": 7.836e-06, "loss": 0.1054, "step": 2615 }, { "epoch": 14.698591549295774, "grad_norm": 0.9369813799858093, "learning_rate": 7.838999999999999e-06, "loss": 0.1, "step": 2616 }, { "epoch": 14.704225352112676, "grad_norm": 0.8464856743812561, "learning_rate": 7.842e-06, "loss": 0.103, "step": 2617 }, { "epoch": 14.709859154929578, "grad_norm": 1.1192958354949951, "learning_rate": 7.845e-06, "loss": 0.1017, "step": 2618 }, { "epoch": 14.715492957746479, "grad_norm": 0.9924764633178711, "learning_rate": 7.848e-06, "loss": 0.0948, "step": 2619 }, { "epoch": 14.721126760563381, "grad_norm": 1.4687048196792603, "learning_rate": 7.851e-06, "loss": 0.1723, "step": 2620 }, { "epoch": 14.726760563380282, "grad_norm": 2.1215264797210693, "learning_rate": 7.854e-06, "loss": 0.0827, "step": 2621 }, { "epoch": 14.732394366197184, "grad_norm": 0.8177070021629333, "learning_rate": 7.857000000000001e-06, "loss": 0.056, "step": 2622 }, { "epoch": 14.738028169014084, "grad_norm": 1.261788249015808, "learning_rate": 7.860000000000001e-06, "loss": 0.092, "step": 2623 }, { "epoch": 14.743661971830987, "grad_norm": 1.1906330585479736, "learning_rate": 7.863e-06, "loss": 0.1078, "step": 2624 }, { "epoch": 14.749295774647887, "grad_norm": 1.1605759859085083, "learning_rate": 7.866e-06, "loss": 0.3387, "step": 2625 }, { "epoch": 14.75492957746479, "grad_norm": 1.1441564559936523, "learning_rate": 7.868999999999999e-06, "loss": 0.2799, "step": 2626 }, { "epoch": 14.76056338028169, "grad_norm": 1.0899624824523926, "learning_rate": 7.872e-06, "loss": 0.2839, "step": 2627 }, { "epoch": 14.766197183098592, 
"grad_norm": 0.9586014747619629, "learning_rate": 7.875e-06, "loss": 0.2433, "step": 2628 }, { "epoch": 14.771830985915493, "grad_norm": 1.0003576278686523, "learning_rate": 7.878e-06, "loss": 0.2395, "step": 2629 }, { "epoch": 14.777464788732395, "grad_norm": 0.8055503368377686, "learning_rate": 7.881e-06, "loss": 0.2373, "step": 2630 }, { "epoch": 14.783098591549296, "grad_norm": 1.1775400638580322, "learning_rate": 7.884e-06, "loss": 0.2593, "step": 2631 }, { "epoch": 14.788732394366198, "grad_norm": 0.8302273750305176, "learning_rate": 7.887000000000001e-06, "loss": 0.1842, "step": 2632 }, { "epoch": 14.794366197183098, "grad_norm": 0.8839142322540283, "learning_rate": 7.89e-06, "loss": 0.1936, "step": 2633 }, { "epoch": 14.8, "grad_norm": 0.9619791507720947, "learning_rate": 7.893e-06, "loss": 0.2055, "step": 2634 }, { "epoch": 14.805633802816901, "grad_norm": 1.2731938362121582, "learning_rate": 7.896e-06, "loss": 0.2789, "step": 2635 }, { "epoch": 14.811267605633804, "grad_norm": 0.9665244221687317, "learning_rate": 7.899e-06, "loss": 0.2038, "step": 2636 }, { "epoch": 14.816901408450704, "grad_norm": 0.9192584156990051, "learning_rate": 7.902000000000002e-06, "loss": 0.2092, "step": 2637 }, { "epoch": 14.822535211267606, "grad_norm": 1.2216484546661377, "learning_rate": 7.905000000000001e-06, "loss": 0.2027, "step": 2638 }, { "epoch": 14.828169014084507, "grad_norm": 0.9423441290855408, "learning_rate": 7.908e-06, "loss": 0.1827, "step": 2639 }, { "epoch": 14.83380281690141, "grad_norm": 1.7397093772888184, "learning_rate": 7.911e-06, "loss": 0.1809, "step": 2640 }, { "epoch": 14.83943661971831, "grad_norm": 1.1128698587417603, "learning_rate": 7.913999999999999e-06, "loss": 0.1993, "step": 2641 }, { "epoch": 14.845070422535212, "grad_norm": 2.127702236175537, "learning_rate": 7.917e-06, "loss": 0.1733, "step": 2642 }, { "epoch": 14.850704225352112, "grad_norm": 1.158449649810791, "learning_rate": 7.92e-06, "loss": 0.1264, "step": 2643 }, { "epoch": 
14.856338028169015, "grad_norm": 1.0250725746154785, "learning_rate": 7.923e-06, "loss": 0.1418, "step": 2644 }, { "epoch": 14.861971830985915, "grad_norm": 0.6589845418930054, "learning_rate": 7.926e-06, "loss": 0.1206, "step": 2645 }, { "epoch": 14.867605633802818, "grad_norm": 0.8348840475082397, "learning_rate": 7.929e-06, "loss": 0.1392, "step": 2646 }, { "epoch": 14.873239436619718, "grad_norm": 1.2787216901779175, "learning_rate": 7.932000000000001e-06, "loss": 0.2188, "step": 2647 }, { "epoch": 14.87887323943662, "grad_norm": 0.7360659241676331, "learning_rate": 7.935000000000001e-06, "loss": 0.1101, "step": 2648 }, { "epoch": 14.88450704225352, "grad_norm": 0.7578384280204773, "learning_rate": 7.938000000000001e-06, "loss": 0.115, "step": 2649 }, { "epoch": 14.890140845070423, "grad_norm": 0.7701386213302612, "learning_rate": 7.941e-06, "loss": 0.121, "step": 2650 }, { "epoch": 14.895774647887324, "grad_norm": 1.06251859664917, "learning_rate": 7.943999999999999e-06, "loss": 0.1024, "step": 2651 }, { "epoch": 14.901408450704226, "grad_norm": 0.7460308074951172, "learning_rate": 7.947e-06, "loss": 0.1369, "step": 2652 }, { "epoch": 14.907042253521126, "grad_norm": 1.163138747215271, "learning_rate": 7.95e-06, "loss": 0.1035, "step": 2653 }, { "epoch": 14.912676056338029, "grad_norm": 1.0894348621368408, "learning_rate": 7.953e-06, "loss": 0.1278, "step": 2654 }, { "epoch": 14.91830985915493, "grad_norm": 1.0023423433303833, "learning_rate": 7.956e-06, "loss": 0.1245, "step": 2655 }, { "epoch": 14.923943661971832, "grad_norm": 0.9256436824798584, "learning_rate": 7.959e-06, "loss": 0.1189, "step": 2656 }, { "epoch": 14.929577464788732, "grad_norm": 1.1167656183242798, "learning_rate": 7.962000000000001e-06, "loss": 0.207, "step": 2657 }, { "epoch": 14.935211267605634, "grad_norm": 0.8095980882644653, "learning_rate": 7.965e-06, "loss": 0.0957, "step": 2658 }, { "epoch": 14.940845070422535, "grad_norm": 0.6890567541122437, "learning_rate": 7.968e-06, "loss": 
0.0915, "step": 2659 }, { "epoch": 14.946478873239437, "grad_norm": 2.002119541168213, "learning_rate": 7.971e-06, "loss": 0.1215, "step": 2660 }, { "epoch": 14.952112676056338, "grad_norm": 1.0603971481323242, "learning_rate": 7.974e-06, "loss": 0.1178, "step": 2661 }, { "epoch": 14.95774647887324, "grad_norm": 0.6045705080032349, "learning_rate": 7.977000000000002e-06, "loss": 0.1028, "step": 2662 }, { "epoch": 14.96338028169014, "grad_norm": 1.4790658950805664, "learning_rate": 7.98e-06, "loss": 0.085, "step": 2663 }, { "epoch": 14.969014084507043, "grad_norm": 0.848943293094635, "learning_rate": 7.983e-06, "loss": 0.0669, "step": 2664 }, { "epoch": 14.974647887323943, "grad_norm": 0.8751040697097778, "learning_rate": 7.986e-06, "loss": 0.0877, "step": 2665 }, { "epoch": 14.980281690140846, "grad_norm": 0.9314478039741516, "learning_rate": 7.989e-06, "loss": 0.0854, "step": 2666 }, { "epoch": 14.985915492957746, "grad_norm": 0.9573487639427185, "learning_rate": 7.992e-06, "loss": 0.065, "step": 2667 }, { "epoch": 14.991549295774648, "grad_norm": 0.8755103349685669, "learning_rate": 7.995e-06, "loss": 0.0655, "step": 2668 }, { "epoch": 14.997183098591549, "grad_norm": 1.2684926986694336, "learning_rate": 7.998e-06, "loss": 0.2628, "step": 2669 }, { "epoch": 15.0, "grad_norm": 0.6340720057487488, "learning_rate": 8.001e-06, "loss": 0.0367, "step": 2670 }, { "epoch": 15.005633802816902, "grad_norm": 1.109892725944519, "learning_rate": 8.004e-06, "loss": 0.2542, "step": 2671 }, { "epoch": 15.011267605633803, "grad_norm": 0.8671929240226746, "learning_rate": 8.007000000000001e-06, "loss": 0.2427, "step": 2672 }, { "epoch": 15.016901408450705, "grad_norm": 0.9852443933486938, "learning_rate": 8.010000000000001e-06, "loss": 0.2761, "step": 2673 }, { "epoch": 15.022535211267606, "grad_norm": 0.9453830718994141, "learning_rate": 8.013000000000001e-06, "loss": 0.2261, "step": 2674 }, { "epoch": 15.028169014084508, "grad_norm": 1.0461130142211914, "learning_rate": 
8.016e-06, "loss": 0.328, "step": 2675 }, { "epoch": 15.033802816901408, "grad_norm": 1.7114094495773315, "learning_rate": 8.018999999999999e-06, "loss": 0.2082, "step": 2676 }, { "epoch": 15.03943661971831, "grad_norm": 1.1261855363845825, "learning_rate": 8.022e-06, "loss": 0.2578, "step": 2677 }, { "epoch": 15.045070422535211, "grad_norm": 0.84699547290802, "learning_rate": 8.025e-06, "loss": 0.2128, "step": 2678 }, { "epoch": 15.050704225352113, "grad_norm": 0.9156381487846375, "learning_rate": 8.028e-06, "loss": 0.2384, "step": 2679 }, { "epoch": 15.056338028169014, "grad_norm": 0.8054420948028564, "learning_rate": 8.031e-06, "loss": 0.1957, "step": 2680 }, { "epoch": 15.061971830985916, "grad_norm": 0.9509589076042175, "learning_rate": 8.034e-06, "loss": 0.231, "step": 2681 }, { "epoch": 15.067605633802817, "grad_norm": 0.721084713935852, "learning_rate": 8.037000000000001e-06, "loss": 0.1917, "step": 2682 }, { "epoch": 15.073239436619719, "grad_norm": 0.7099788784980774, "learning_rate": 8.040000000000001e-06, "loss": 0.2122, "step": 2683 }, { "epoch": 15.07887323943662, "grad_norm": 0.9409608244895935, "learning_rate": 8.043e-06, "loss": 0.122, "step": 2684 }, { "epoch": 15.084507042253522, "grad_norm": 0.8258137106895447, "learning_rate": 8.046e-06, "loss": 0.1535, "step": 2685 }, { "epoch": 15.090140845070422, "grad_norm": 1.4846991300582886, "learning_rate": 8.049e-06, "loss": 0.1493, "step": 2686 }, { "epoch": 15.095774647887325, "grad_norm": 0.8132118582725525, "learning_rate": 8.052000000000002e-06, "loss": 0.1665, "step": 2687 }, { "epoch": 15.101408450704225, "grad_norm": 0.7702975273132324, "learning_rate": 8.055e-06, "loss": 0.1251, "step": 2688 }, { "epoch": 15.107042253521128, "grad_norm": 1.0893995761871338, "learning_rate": 8.058e-06, "loss": 0.1179, "step": 2689 }, { "epoch": 15.112676056338028, "grad_norm": 0.7828948497772217, "learning_rate": 8.061e-06, "loss": 0.1527, "step": 2690 }, { "epoch": 15.11830985915493, "grad_norm": 
0.7459627985954285, "learning_rate": 8.064e-06, "loss": 0.1148, "step": 2691 }, { "epoch": 15.12394366197183, "grad_norm": 1.042141079902649, "learning_rate": 8.067e-06, "loss": 0.1762, "step": 2692 }, { "epoch": 15.129577464788733, "grad_norm": 1.023561716079712, "learning_rate": 8.07e-06, "loss": 0.1784, "step": 2693 }, { "epoch": 15.135211267605634, "grad_norm": 0.9534387588500977, "learning_rate": 8.073e-06, "loss": 0.1216, "step": 2694 }, { "epoch": 15.140845070422536, "grad_norm": 0.8539323210716248, "learning_rate": 8.076e-06, "loss": 0.124, "step": 2695 }, { "epoch": 15.146478873239436, "grad_norm": 0.7775946259498596, "learning_rate": 8.079e-06, "loss": 0.0872, "step": 2696 }, { "epoch": 15.152112676056339, "grad_norm": 1.1069778203964233, "learning_rate": 8.082e-06, "loss": 0.1777, "step": 2697 }, { "epoch": 15.15774647887324, "grad_norm": 0.9294941425323486, "learning_rate": 8.085000000000001e-06, "loss": 0.1394, "step": 2698 }, { "epoch": 15.163380281690142, "grad_norm": 0.868341863155365, "learning_rate": 8.088000000000001e-06, "loss": 0.1137, "step": 2699 }, { "epoch": 15.169014084507042, "grad_norm": 0.9345297813415527, "learning_rate": 8.091e-06, "loss": 0.1379, "step": 2700 }, { "epoch": 15.174647887323944, "grad_norm": 0.8293560743331909, "learning_rate": 8.093999999999999e-06, "loss": 0.1009, "step": 2701 }, { "epoch": 15.180281690140845, "grad_norm": 0.8270005583763123, "learning_rate": 8.096999999999999e-06, "loss": 0.0866, "step": 2702 }, { "epoch": 15.185915492957747, "grad_norm": 1.1075714826583862, "learning_rate": 8.1e-06, "loss": 0.1214, "step": 2703 }, { "epoch": 15.191549295774648, "grad_norm": 0.703230619430542, "learning_rate": 8.103e-06, "loss": 0.1533, "step": 2704 }, { "epoch": 15.19718309859155, "grad_norm": 0.9443958401679993, "learning_rate": 8.106e-06, "loss": 0.0775, "step": 2705 }, { "epoch": 15.20281690140845, "grad_norm": 1.3509917259216309, "learning_rate": 8.109e-06, "loss": 0.0999, "step": 2706 }, { "epoch": 
15.208450704225353, "grad_norm": 0.9846903681755066, "learning_rate": 8.112e-06, "loss": 0.0957, "step": 2707 }, { "epoch": 15.214084507042253, "grad_norm": 0.8469778299331665, "learning_rate": 8.115000000000001e-06, "loss": 0.0607, "step": 2708 }, { "epoch": 15.219718309859156, "grad_norm": 0.7664074301719666, "learning_rate": 8.118000000000001e-06, "loss": 0.0856, "step": 2709 }, { "epoch": 15.225352112676056, "grad_norm": 0.8501128554344177, "learning_rate": 8.121e-06, "loss": 0.1201, "step": 2710 }, { "epoch": 15.230985915492958, "grad_norm": 0.9144497513771057, "learning_rate": 8.124e-06, "loss": 0.0811, "step": 2711 }, { "epoch": 15.236619718309859, "grad_norm": 1.465232253074646, "learning_rate": 8.126999999999999e-06, "loss": 0.1251, "step": 2712 }, { "epoch": 15.242253521126761, "grad_norm": 0.7066370844841003, "learning_rate": 8.13e-06, "loss": 0.0992, "step": 2713 }, { "epoch": 15.247887323943662, "grad_norm": 1.342861294746399, "learning_rate": 8.133e-06, "loss": 0.0865, "step": 2714 }, { "epoch": 15.253521126760564, "grad_norm": 1.2968153953552246, "learning_rate": 8.136e-06, "loss": 0.3282, "step": 2715 }, { "epoch": 15.259154929577464, "grad_norm": 1.1871732473373413, "learning_rate": 8.139e-06, "loss": 0.3236, "step": 2716 }, { "epoch": 15.264788732394367, "grad_norm": 0.9538513422012329, "learning_rate": 8.142e-06, "loss": 0.2896, "step": 2717 }, { "epoch": 15.270422535211267, "grad_norm": 0.7642456889152527, "learning_rate": 8.145e-06, "loss": 0.1953, "step": 2718 }, { "epoch": 15.27605633802817, "grad_norm": 0.9417255520820618, "learning_rate": 8.148e-06, "loss": 0.247, "step": 2719 }, { "epoch": 15.28169014084507, "grad_norm": 0.9153491258621216, "learning_rate": 8.151e-06, "loss": 0.2044, "step": 2720 }, { "epoch": 15.287323943661972, "grad_norm": 0.8322272300720215, "learning_rate": 8.154e-06, "loss": 0.2433, "step": 2721 }, { "epoch": 15.292957746478873, "grad_norm": 1.0533829927444458, "learning_rate": 8.157e-06, "loss": 0.2379, "step": 2722 
}, { "epoch": 15.298591549295775, "grad_norm": 0.8990384340286255, "learning_rate": 8.160000000000001e-06, "loss": 0.1986, "step": 2723 }, { "epoch": 15.304225352112676, "grad_norm": 0.8271486759185791, "learning_rate": 8.163000000000001e-06, "loss": 0.1391, "step": 2724 }, { "epoch": 15.309859154929578, "grad_norm": 0.9895846843719482, "learning_rate": 8.166e-06, "loss": 0.2313, "step": 2725 }, { "epoch": 15.315492957746478, "grad_norm": 1.0710803270339966, "learning_rate": 8.169e-06, "loss": 0.1932, "step": 2726 }, { "epoch": 15.32112676056338, "grad_norm": 0.8945509791374207, "learning_rate": 8.171999999999999e-06, "loss": 0.2251, "step": 2727 }, { "epoch": 15.326760563380281, "grad_norm": 0.8598551154136658, "learning_rate": 8.175e-06, "loss": 0.2322, "step": 2728 }, { "epoch": 15.332394366197184, "grad_norm": 0.8095027208328247, "learning_rate": 8.178e-06, "loss": 0.1931, "step": 2729 }, { "epoch": 15.338028169014084, "grad_norm": 0.8386921286582947, "learning_rate": 8.181e-06, "loss": 0.224, "step": 2730 }, { "epoch": 15.343661971830986, "grad_norm": 1.002504825592041, "learning_rate": 8.184e-06, "loss": 0.2019, "step": 2731 }, { "epoch": 15.349295774647887, "grad_norm": 0.8359825611114502, "learning_rate": 8.187e-06, "loss": 0.1654, "step": 2732 }, { "epoch": 15.35492957746479, "grad_norm": 0.8392417430877686, "learning_rate": 8.190000000000001e-06, "loss": 0.1453, "step": 2733 }, { "epoch": 15.36056338028169, "grad_norm": 0.7815101742744446, "learning_rate": 8.193000000000001e-06, "loss": 0.1356, "step": 2734 }, { "epoch": 15.366197183098592, "grad_norm": 1.36123788356781, "learning_rate": 8.196e-06, "loss": 0.1854, "step": 2735 }, { "epoch": 15.371830985915492, "grad_norm": 1.2135112285614014, "learning_rate": 8.199e-06, "loss": 0.1435, "step": 2736 }, { "epoch": 15.377464788732395, "grad_norm": 0.8379486203193665, "learning_rate": 8.201999999999999e-06, "loss": 0.1575, "step": 2737 }, { "epoch": 15.383098591549295, "grad_norm": 0.9114857912063599, 
"learning_rate": 8.205e-06, "loss": 0.1297, "step": 2738 }, { "epoch": 15.388732394366198, "grad_norm": 1.0966999530792236, "learning_rate": 8.208e-06, "loss": 0.1426, "step": 2739 }, { "epoch": 15.394366197183098, "grad_norm": 1.0362664461135864, "learning_rate": 8.211e-06, "loss": 0.1452, "step": 2740 }, { "epoch": 15.4, "grad_norm": 0.993938684463501, "learning_rate": 8.214e-06, "loss": 0.099, "step": 2741 }, { "epoch": 15.4056338028169, "grad_norm": 0.9987441897392273, "learning_rate": 8.217e-06, "loss": 0.1315, "step": 2742 }, { "epoch": 15.411267605633803, "grad_norm": 0.5772591829299927, "learning_rate": 8.220000000000001e-06, "loss": 0.0766, "step": 2743 }, { "epoch": 15.416901408450704, "grad_norm": 1.2526097297668457, "learning_rate": 8.223e-06, "loss": 0.1145, "step": 2744 }, { "epoch": 15.422535211267606, "grad_norm": 0.8767606616020203, "learning_rate": 8.226e-06, "loss": 0.1184, "step": 2745 }, { "epoch": 15.428169014084506, "grad_norm": 0.8150380849838257, "learning_rate": 8.229e-06, "loss": 0.0981, "step": 2746 }, { "epoch": 15.433802816901409, "grad_norm": 1.3524956703186035, "learning_rate": 8.232e-06, "loss": 0.1066, "step": 2747 }, { "epoch": 15.43943661971831, "grad_norm": 0.674005925655365, "learning_rate": 8.235000000000002e-06, "loss": 0.0585, "step": 2748 }, { "epoch": 15.445070422535212, "grad_norm": 0.7301984429359436, "learning_rate": 8.238e-06, "loss": 0.0943, "step": 2749 }, { "epoch": 15.450704225352112, "grad_norm": 0.91359943151474, "learning_rate": 8.241e-06, "loss": 0.128, "step": 2750 }, { "epoch": 15.456338028169014, "grad_norm": 0.5797883868217468, "learning_rate": 8.244e-06, "loss": 0.0676, "step": 2751 }, { "epoch": 15.461971830985915, "grad_norm": 0.7641134858131409, "learning_rate": 8.246999999999999e-06, "loss": 0.0692, "step": 2752 }, { "epoch": 15.467605633802817, "grad_norm": 1.6063405275344849, "learning_rate": 8.25e-06, "loss": 0.1474, "step": 2753 }, { "epoch": 15.473239436619718, "grad_norm": 0.842797577381134, 
"learning_rate": 8.253e-06, "loss": 0.0716, "step": 2754 }, { "epoch": 15.47887323943662, "grad_norm": 1.2832893133163452, "learning_rate": 8.256e-06, "loss": 0.1136, "step": 2755 }, { "epoch": 15.48450704225352, "grad_norm": 0.6156191825866699, "learning_rate": 8.259e-06, "loss": 0.0593, "step": 2756 }, { "epoch": 15.490140845070423, "grad_norm": 0.721260130405426, "learning_rate": 8.262e-06, "loss": 0.0576, "step": 2757 }, { "epoch": 15.495774647887323, "grad_norm": 0.7410542964935303, "learning_rate": 8.265000000000001e-06, "loss": 0.0612, "step": 2758 }, { "epoch": 15.501408450704226, "grad_norm": 2.120276927947998, "learning_rate": 8.268000000000001e-06, "loss": 0.3586, "step": 2759 }, { "epoch": 15.507042253521126, "grad_norm": 1.3409016132354736, "learning_rate": 8.271000000000001e-06, "loss": 0.2372, "step": 2760 }, { "epoch": 15.512676056338028, "grad_norm": 0.9511126279830933, "learning_rate": 8.274e-06, "loss": 0.2861, "step": 2761 }, { "epoch": 15.518309859154929, "grad_norm": 0.7708508372306824, "learning_rate": 8.276999999999999e-06, "loss": 0.2447, "step": 2762 }, { "epoch": 15.523943661971831, "grad_norm": 0.7951607704162598, "learning_rate": 8.28e-06, "loss": 0.1817, "step": 2763 }, { "epoch": 15.529577464788732, "grad_norm": 0.7138514518737793, "learning_rate": 8.283e-06, "loss": 0.2336, "step": 2764 }, { "epoch": 15.535211267605634, "grad_norm": 0.9361281394958496, "learning_rate": 8.286e-06, "loss": 0.2046, "step": 2765 }, { "epoch": 15.540845070422534, "grad_norm": 0.9767065048217773, "learning_rate": 8.289e-06, "loss": 0.2453, "step": 2766 }, { "epoch": 15.546478873239437, "grad_norm": 0.8887463212013245, "learning_rate": 8.292e-06, "loss": 0.2159, "step": 2767 }, { "epoch": 15.552112676056337, "grad_norm": 0.9005931615829468, "learning_rate": 8.295000000000001e-06, "loss": 0.1668, "step": 2768 }, { "epoch": 15.55774647887324, "grad_norm": 1.1442692279815674, "learning_rate": 8.298000000000001e-06, "loss": 0.1514, "step": 2769 }, { "epoch": 
15.56338028169014, "grad_norm": 1.0275840759277344, "learning_rate": 8.301e-06, "loss": 0.1672, "step": 2770 }, { "epoch": 15.569014084507042, "grad_norm": 1.0269551277160645, "learning_rate": 8.304e-06, "loss": 0.2106, "step": 2771 }, { "epoch": 15.574647887323943, "grad_norm": 0.9682632088661194, "learning_rate": 8.307e-06, "loss": 0.1638, "step": 2772 }, { "epoch": 15.580281690140845, "grad_norm": 0.9929102063179016, "learning_rate": 8.310000000000002e-06, "loss": 0.1694, "step": 2773 }, { "epoch": 15.585915492957746, "grad_norm": 0.7758603692054749, "learning_rate": 8.313e-06, "loss": 0.1291, "step": 2774 }, { "epoch": 15.591549295774648, "grad_norm": 1.039397120475769, "learning_rate": 8.316e-06, "loss": 0.106, "step": 2775 }, { "epoch": 15.597183098591549, "grad_norm": 0.7978324890136719, "learning_rate": 8.319e-06, "loss": 0.156, "step": 2776 }, { "epoch": 15.60281690140845, "grad_norm": 0.6685842871665955, "learning_rate": 8.322e-06, "loss": 0.0944, "step": 2777 }, { "epoch": 15.608450704225351, "grad_norm": 0.7431612014770508, "learning_rate": 8.325e-06, "loss": 0.1241, "step": 2778 }, { "epoch": 15.614084507042254, "grad_norm": 1.2736949920654297, "learning_rate": 8.328e-06, "loss": 0.1954, "step": 2779 }, { "epoch": 15.619718309859154, "grad_norm": 0.7188799977302551, "learning_rate": 8.331e-06, "loss": 0.1254, "step": 2780 }, { "epoch": 15.625352112676056, "grad_norm": 0.8509241342544556, "learning_rate": 8.334e-06, "loss": 0.1412, "step": 2781 }, { "epoch": 15.630985915492957, "grad_norm": 0.9205145239830017, "learning_rate": 8.337e-06, "loss": 0.1192, "step": 2782 }, { "epoch": 15.63661971830986, "grad_norm": 0.7206032276153564, "learning_rate": 8.340000000000001e-06, "loss": 0.0981, "step": 2783 }, { "epoch": 15.642253521126761, "grad_norm": 0.9130805134773254, "learning_rate": 8.343000000000001e-06, "loss": 0.1249, "step": 2784 }, { "epoch": 15.647887323943662, "grad_norm": 0.8492620587348938, "learning_rate": 8.346000000000001e-06, "loss": 0.1152, 
"step": 2785 }, { "epoch": 15.653521126760563, "grad_norm": 0.48178380727767944, "learning_rate": 8.349e-06, "loss": 0.0501, "step": 2786 }, { "epoch": 15.659154929577465, "grad_norm": 0.9286878108978271, "learning_rate": 8.351999999999999e-06, "loss": 0.1181, "step": 2787 }, { "epoch": 15.664788732394367, "grad_norm": 1.0384036302566528, "learning_rate": 8.355e-06, "loss": 0.1571, "step": 2788 }, { "epoch": 15.670422535211268, "grad_norm": 1.160064697265625, "learning_rate": 8.358e-06, "loss": 0.1062, "step": 2789 }, { "epoch": 15.676056338028168, "grad_norm": 1.03548264503479, "learning_rate": 8.361e-06, "loss": 0.1182, "step": 2790 }, { "epoch": 15.68169014084507, "grad_norm": 1.5913087129592896, "learning_rate": 8.364e-06, "loss": 0.1721, "step": 2791 }, { "epoch": 15.687323943661973, "grad_norm": 1.931322693824768, "learning_rate": 8.367e-06, "loss": 0.171, "step": 2792 }, { "epoch": 15.692957746478873, "grad_norm": 1.1467056274414062, "learning_rate": 8.370000000000001e-06, "loss": 0.0988, "step": 2793 }, { "epoch": 15.698591549295774, "grad_norm": 0.8115183711051941, "learning_rate": 8.373000000000001e-06, "loss": 0.1221, "step": 2794 }, { "epoch": 15.704225352112676, "grad_norm": 0.6418557167053223, "learning_rate": 8.376e-06, "loss": 0.1001, "step": 2795 }, { "epoch": 15.709859154929578, "grad_norm": 1.1489182710647583, "learning_rate": 8.379e-06, "loss": 0.1091, "step": 2796 }, { "epoch": 15.715492957746479, "grad_norm": 0.9547248482704163, "learning_rate": 8.382e-06, "loss": 0.0835, "step": 2797 }, { "epoch": 15.721126760563381, "grad_norm": 1.255326747894287, "learning_rate": 8.385e-06, "loss": 0.1233, "step": 2798 }, { "epoch": 15.726760563380282, "grad_norm": 0.9324260354042053, "learning_rate": 8.388e-06, "loss": 0.0812, "step": 2799 }, { "epoch": 15.732394366197184, "grad_norm": 0.9473111629486084, "learning_rate": 8.391e-06, "loss": 0.0688, "step": 2800 }, { "epoch": 15.738028169014084, "grad_norm": 2.664045572280884, "learning_rate": 8.394e-06, 
"loss": 0.1056, "step": 2801 }, { "epoch": 15.743661971830987, "grad_norm": 0.8926306962966919, "learning_rate": 8.397e-06, "loss": 0.1264, "step": 2802 }, { "epoch": 15.749295774647887, "grad_norm": 1.479974389076233, "learning_rate": 8.400000000000001e-06, "loss": 0.2752, "step": 2803 }, { "epoch": 15.75492957746479, "grad_norm": 1.3992557525634766, "learning_rate": 8.403e-06, "loss": 0.2637, "step": 2804 }, { "epoch": 15.76056338028169, "grad_norm": 0.9449517130851746, "learning_rate": 8.406e-06, "loss": 0.2101, "step": 2805 }, { "epoch": 15.766197183098592, "grad_norm": 2.0017249584198, "learning_rate": 8.409e-06, "loss": 0.2533, "step": 2806 }, { "epoch": 15.771830985915493, "grad_norm": 1.493827223777771, "learning_rate": 8.412e-06, "loss": 0.2703, "step": 2807 }, { "epoch": 15.777464788732395, "grad_norm": 1.0409066677093506, "learning_rate": 8.415000000000002e-06, "loss": 0.2495, "step": 2808 }, { "epoch": 15.783098591549296, "grad_norm": 0.6740534901618958, "learning_rate": 8.418000000000001e-06, "loss": 0.1875, "step": 2809 }, { "epoch": 15.788732394366198, "grad_norm": 0.9914717674255371, "learning_rate": 8.421000000000001e-06, "loss": 0.1817, "step": 2810 }, { "epoch": 15.794366197183098, "grad_norm": 1.086603045463562, "learning_rate": 8.424e-06, "loss": 0.1775, "step": 2811 }, { "epoch": 15.8, "grad_norm": 1.0153316259384155, "learning_rate": 8.426999999999999e-06, "loss": 0.1779, "step": 2812 }, { "epoch": 15.805633802816901, "grad_norm": 0.8724736571311951, "learning_rate": 8.43e-06, "loss": 0.2156, "step": 2813 }, { "epoch": 15.811267605633804, "grad_norm": 0.824389636516571, "learning_rate": 8.433e-06, "loss": 0.1818, "step": 2814 }, { "epoch": 15.816901408450704, "grad_norm": 0.7078551650047302, "learning_rate": 8.436e-06, "loss": 0.1588, "step": 2815 }, { "epoch": 15.822535211267606, "grad_norm": 1.1052594184875488, "learning_rate": 8.439e-06, "loss": 0.1799, "step": 2816 }, { "epoch": 15.828169014084507, "grad_norm": 0.9662029147148132, 
"learning_rate": 8.442e-06, "loss": 0.1357, "step": 2817 }, { "epoch": 15.83380281690141, "grad_norm": 0.7742413878440857, "learning_rate": 8.445e-06, "loss": 0.1609, "step": 2818 }, { "epoch": 15.83943661971831, "grad_norm": 0.6380323767662048, "learning_rate": 8.448000000000001e-06, "loss": 0.1164, "step": 2819 }, { "epoch": 15.845070422535212, "grad_norm": 1.2763640880584717, "learning_rate": 8.451000000000001e-06, "loss": 0.1927, "step": 2820 }, { "epoch": 15.850704225352112, "grad_norm": 0.8617068529129028, "learning_rate": 8.454e-06, "loss": 0.1558, "step": 2821 }, { "epoch": 15.856338028169015, "grad_norm": 0.6943731307983398, "learning_rate": 8.457e-06, "loss": 0.1145, "step": 2822 }, { "epoch": 15.861971830985915, "grad_norm": 0.7874083518981934, "learning_rate": 8.459999999999999e-06, "loss": 0.1132, "step": 2823 }, { "epoch": 15.867605633802818, "grad_norm": 0.7146321535110474, "learning_rate": 8.463e-06, "loss": 0.1235, "step": 2824 }, { "epoch": 15.873239436619718, "grad_norm": 1.0668017864227295, "learning_rate": 8.466e-06, "loss": 0.1073, "step": 2825 }, { "epoch": 15.87887323943662, "grad_norm": 0.7963314056396484, "learning_rate": 8.469e-06, "loss": 0.103, "step": 2826 }, { "epoch": 15.88450704225352, "grad_norm": 0.8345138430595398, "learning_rate": 8.472e-06, "loss": 0.1253, "step": 2827 }, { "epoch": 15.890140845070423, "grad_norm": 0.7264297604560852, "learning_rate": 8.475e-06, "loss": 0.1117, "step": 2828 }, { "epoch": 15.895774647887324, "grad_norm": 0.9832761883735657, "learning_rate": 8.478e-06, "loss": 0.1466, "step": 2829 }, { "epoch": 15.901408450704226, "grad_norm": 0.5885223150253296, "learning_rate": 8.481e-06, "loss": 0.0849, "step": 2830 }, { "epoch": 15.907042253521126, "grad_norm": 0.6958385109901428, "learning_rate": 8.484e-06, "loss": 0.0793, "step": 2831 }, { "epoch": 15.912676056338029, "grad_norm": 0.6612353324890137, "learning_rate": 8.487e-06, "loss": 0.1212, "step": 2832 }, { "epoch": 15.91830985915493, "grad_norm": 
0.581894040107727, "learning_rate": 8.49e-06, "loss": 0.0682, "step": 2833 }, { "epoch": 15.923943661971832, "grad_norm": 1.1093391180038452, "learning_rate": 8.493000000000002e-06, "loss": 0.2146, "step": 2834 }, { "epoch": 15.929577464788732, "grad_norm": 0.747353196144104, "learning_rate": 8.496e-06, "loss": 0.0999, "step": 2835 }, { "epoch": 15.935211267605634, "grad_norm": 0.7556625604629517, "learning_rate": 8.499e-06, "loss": 0.0718, "step": 2836 }, { "epoch": 15.940845070422535, "grad_norm": 0.7712485194206238, "learning_rate": 8.502e-06, "loss": 0.0956, "step": 2837 }, { "epoch": 15.946478873239437, "grad_norm": 0.6484929323196411, "learning_rate": 8.504999999999999e-06, "loss": 0.1131, "step": 2838 }, { "epoch": 15.952112676056338, "grad_norm": 0.9547204375267029, "learning_rate": 8.508e-06, "loss": 0.1433, "step": 2839 }, { "epoch": 15.95774647887324, "grad_norm": 0.9918680191040039, "learning_rate": 8.511e-06, "loss": 0.1144, "step": 2840 }, { "epoch": 15.96338028169014, "grad_norm": 0.7647086381912231, "learning_rate": 8.514e-06, "loss": 0.1138, "step": 2841 }, { "epoch": 15.969014084507043, "grad_norm": 0.7989391088485718, "learning_rate": 8.517e-06, "loss": 0.1214, "step": 2842 }, { "epoch": 15.974647887323943, "grad_norm": 0.6360899806022644, "learning_rate": 8.52e-06, "loss": 0.0531, "step": 2843 }, { "epoch": 15.980281690140846, "grad_norm": 1.2566708326339722, "learning_rate": 8.523000000000001e-06, "loss": 0.1814, "step": 2844 }, { "epoch": 15.985915492957746, "grad_norm": 0.6348083019256592, "learning_rate": 8.526000000000001e-06, "loss": 0.0535, "step": 2845 }, { "epoch": 15.991549295774648, "grad_norm": 0.9602321982383728, "learning_rate": 8.529e-06, "loss": 0.0867, "step": 2846 }, { "epoch": 15.997183098591549, "grad_norm": 1.05921471118927, "learning_rate": 8.532e-06, "loss": 0.1638, "step": 2847 }, { "epoch": 16.0, "grad_norm": 0.4883933365345001, "learning_rate": 8.534999999999999e-06, "loss": 0.0371, "step": 2848 }, { "epoch": 
16.005633802816902, "grad_norm": 1.1529399156570435, "learning_rate": 8.538e-06, "loss": 0.2428, "step": 2849 }, { "epoch": 16.011267605633805, "grad_norm": 0.8200862407684326, "learning_rate": 8.541e-06, "loss": 0.2393, "step": 2850 }, { "epoch": 16.016901408450703, "grad_norm": 0.7540697455406189, "learning_rate": 8.544e-06, "loss": 0.2571, "step": 2851 }, { "epoch": 16.022535211267606, "grad_norm": 1.3684117794036865, "learning_rate": 8.547e-06, "loss": 0.2641, "step": 2852 }, { "epoch": 16.028169014084508, "grad_norm": 1.673906922340393, "learning_rate": 8.55e-06, "loss": 0.2199, "step": 2853 }, { "epoch": 16.03380281690141, "grad_norm": 1.0687905550003052, "learning_rate": 8.553000000000001e-06, "loss": 0.2353, "step": 2854 }, { "epoch": 16.03943661971831, "grad_norm": 1.0001530647277832, "learning_rate": 8.556e-06, "loss": 0.231, "step": 2855 }, { "epoch": 16.04507042253521, "grad_norm": 0.9346075654029846, "learning_rate": 8.559e-06, "loss": 0.2188, "step": 2856 }, { "epoch": 16.050704225352113, "grad_norm": 0.779593825340271, "learning_rate": 8.562e-06, "loss": 0.1856, "step": 2857 }, { "epoch": 16.056338028169016, "grad_norm": 0.9275345802307129, "learning_rate": 8.565e-06, "loss": 0.1677, "step": 2858 }, { "epoch": 16.061971830985915, "grad_norm": 1.0503830909729004, "learning_rate": 8.568000000000002e-06, "loss": 0.2379, "step": 2859 }, { "epoch": 16.067605633802817, "grad_norm": 0.7354325652122498, "learning_rate": 8.571e-06, "loss": 0.1491, "step": 2860 }, { "epoch": 16.07323943661972, "grad_norm": 0.8676498532295227, "learning_rate": 8.574e-06, "loss": 0.192, "step": 2861 }, { "epoch": 16.07887323943662, "grad_norm": 0.8387866020202637, "learning_rate": 8.577e-06, "loss": 0.1461, "step": 2862 }, { "epoch": 16.08450704225352, "grad_norm": 0.8324146866798401, "learning_rate": 8.58e-06, "loss": 0.1667, "step": 2863 }, { "epoch": 16.090140845070422, "grad_norm": 0.6148998141288757, "learning_rate": 8.583e-06, "loss": 0.1166, "step": 2864 }, { "epoch": 
16.095774647887325, "grad_norm": 0.5746515393257141, "learning_rate": 8.586e-06, "loss": 0.1199, "step": 2865 }, { "epoch": 16.101408450704227, "grad_norm": 0.8585340976715088, "learning_rate": 8.589e-06, "loss": 0.1432, "step": 2866 }, { "epoch": 16.107042253521126, "grad_norm": 0.7790031433105469, "learning_rate": 8.592e-06, "loss": 0.1133, "step": 2867 }, { "epoch": 16.112676056338028, "grad_norm": 0.8761863112449646, "learning_rate": 8.595e-06, "loss": 0.1503, "step": 2868 }, { "epoch": 16.11830985915493, "grad_norm": 0.6966165900230408, "learning_rate": 8.598000000000001e-06, "loss": 0.1202, "step": 2869 }, { "epoch": 16.123943661971833, "grad_norm": 0.7318444848060608, "learning_rate": 8.601000000000001e-06, "loss": 0.1406, "step": 2870 }, { "epoch": 16.12957746478873, "grad_norm": 0.9771597385406494, "learning_rate": 8.604000000000001e-06, "loss": 0.1605, "step": 2871 }, { "epoch": 16.135211267605634, "grad_norm": 0.6654000282287598, "learning_rate": 8.606999999999999e-06, "loss": 0.1073, "step": 2872 }, { "epoch": 16.140845070422536, "grad_norm": 0.6441059112548828, "learning_rate": 8.609999999999999e-06, "loss": 0.0908, "step": 2873 }, { "epoch": 16.146478873239438, "grad_norm": 0.7591120600700378, "learning_rate": 8.613e-06, "loss": 0.096, "step": 2874 }, { "epoch": 16.152112676056337, "grad_norm": 0.930167555809021, "learning_rate": 8.616e-06, "loss": 0.1705, "step": 2875 }, { "epoch": 16.15774647887324, "grad_norm": 0.791275143623352, "learning_rate": 8.619e-06, "loss": 0.0863, "step": 2876 }, { "epoch": 16.16338028169014, "grad_norm": 0.6328141689300537, "learning_rate": 8.622e-06, "loss": 0.0862, "step": 2877 }, { "epoch": 16.169014084507044, "grad_norm": 0.7014345526695251, "learning_rate": 8.625e-06, "loss": 0.1113, "step": 2878 }, { "epoch": 16.174647887323943, "grad_norm": 0.7163512706756592, "learning_rate": 8.628000000000001e-06, "loss": 0.0613, "step": 2879 }, { "epoch": 16.180281690140845, "grad_norm": 0.7760319113731384, "learning_rate": 
8.631000000000001e-06, "loss": 0.0939, "step": 2880 }, { "epoch": 16.185915492957747, "grad_norm": 0.9137998819351196, "learning_rate": 8.634e-06, "loss": 0.1154, "step": 2881 }, { "epoch": 16.19154929577465, "grad_norm": 0.8026256561279297, "learning_rate": 8.637e-06, "loss": 0.0901, "step": 2882 }, { "epoch": 16.197183098591548, "grad_norm": 1.02559494972229, "learning_rate": 8.64e-06, "loss": 0.1129, "step": 2883 }, { "epoch": 16.20281690140845, "grad_norm": 0.7272340059280396, "learning_rate": 8.643e-06, "loss": 0.079, "step": 2884 }, { "epoch": 16.208450704225353, "grad_norm": 0.893150269985199, "learning_rate": 8.646e-06, "loss": 0.1241, "step": 2885 }, { "epoch": 16.214084507042255, "grad_norm": 0.8939018249511719, "learning_rate": 8.649e-06, "loss": 0.0689, "step": 2886 }, { "epoch": 16.219718309859154, "grad_norm": 0.7538763880729675, "learning_rate": 8.652e-06, "loss": 0.0934, "step": 2887 }, { "epoch": 16.225352112676056, "grad_norm": 0.9086311459541321, "learning_rate": 8.655e-06, "loss": 0.1309, "step": 2888 }, { "epoch": 16.23098591549296, "grad_norm": 0.5686888098716736, "learning_rate": 8.658e-06, "loss": 0.0725, "step": 2889 }, { "epoch": 16.23661971830986, "grad_norm": 1.2001277208328247, "learning_rate": 8.661e-06, "loss": 0.0573, "step": 2890 }, { "epoch": 16.24225352112676, "grad_norm": 0.6721484661102295, "learning_rate": 8.664e-06, "loss": 0.0944, "step": 2891 }, { "epoch": 16.24788732394366, "grad_norm": 0.7028104066848755, "learning_rate": 8.667e-06, "loss": 0.055, "step": 2892 }, { "epoch": 16.253521126760564, "grad_norm": 1.258417010307312, "learning_rate": 8.67e-06, "loss": 0.2857, "step": 2893 }, { "epoch": 16.259154929577466, "grad_norm": 0.9957136511802673, "learning_rate": 8.673000000000001e-06, "loss": 0.2808, "step": 2894 }, { "epoch": 16.264788732394365, "grad_norm": 0.7730371952056885, "learning_rate": 8.676000000000001e-06, "loss": 0.258, "step": 2895 }, { "epoch": 16.270422535211267, "grad_norm": 0.8101836442947388, 
"learning_rate": 8.679000000000001e-06, "loss": 0.2172, "step": 2896 }, { "epoch": 16.27605633802817, "grad_norm": 0.9684430956840515, "learning_rate": 8.682e-06, "loss": 0.2093, "step": 2897 }, { "epoch": 16.281690140845072, "grad_norm": 0.6897566318511963, "learning_rate": 8.684999999999999e-06, "loss": 0.2118, "step": 2898 }, { "epoch": 16.28732394366197, "grad_norm": 0.8289135098457336, "learning_rate": 8.688e-06, "loss": 0.1837, "step": 2899 }, { "epoch": 16.292957746478873, "grad_norm": 0.8668294548988342, "learning_rate": 8.691e-06, "loss": 0.2304, "step": 2900 }, { "epoch": 16.298591549295775, "grad_norm": 0.7824131846427917, "learning_rate": 8.694e-06, "loss": 0.2116, "step": 2901 }, { "epoch": 16.304225352112677, "grad_norm": 1.1551772356033325, "learning_rate": 8.697e-06, "loss": 0.1491, "step": 2902 }, { "epoch": 16.309859154929576, "grad_norm": 0.742215633392334, "learning_rate": 8.7e-06, "loss": 0.1666, "step": 2903 }, { "epoch": 16.31549295774648, "grad_norm": 0.7424885034561157, "learning_rate": 8.703000000000001e-06, "loss": 0.17, "step": 2904 }, { "epoch": 16.32112676056338, "grad_norm": 0.8499301671981812, "learning_rate": 8.706000000000001e-06, "loss": 0.1666, "step": 2905 }, { "epoch": 16.326760563380283, "grad_norm": 0.7001984715461731, "learning_rate": 8.709e-06, "loss": 0.1273, "step": 2906 }, { "epoch": 16.33239436619718, "grad_norm": 0.8283653259277344, "learning_rate": 8.712e-06, "loss": 0.1655, "step": 2907 }, { "epoch": 16.338028169014084, "grad_norm": 0.6067949533462524, "learning_rate": 8.715e-06, "loss": 0.1517, "step": 2908 }, { "epoch": 16.343661971830986, "grad_norm": 0.5631360411643982, "learning_rate": 8.718e-06, "loss": 0.1043, "step": 2909 }, { "epoch": 16.34929577464789, "grad_norm": 0.9717921018600464, "learning_rate": 8.721e-06, "loss": 0.1604, "step": 2910 }, { "epoch": 16.354929577464787, "grad_norm": 0.8226531147956848, "learning_rate": 8.724e-06, "loss": 0.1341, "step": 2911 }, { "epoch": 16.36056338028169, "grad_norm": 
0.8098730444908142, "learning_rate": 8.727e-06, "loss": 0.0886, "step": 2912 }, { "epoch": 16.366197183098592, "grad_norm": 0.9211949110031128, "learning_rate": 8.73e-06, "loss": 0.1342, "step": 2913 }, { "epoch": 16.371830985915494, "grad_norm": 0.9664998650550842, "learning_rate": 8.733000000000001e-06, "loss": 0.1436, "step": 2914 }, { "epoch": 16.377464788732393, "grad_norm": 0.7152832746505737, "learning_rate": 8.736e-06, "loss": 0.1131, "step": 2915 }, { "epoch": 16.383098591549295, "grad_norm": 2.146066427230835, "learning_rate": 8.739e-06, "loss": 0.1066, "step": 2916 }, { "epoch": 16.388732394366198, "grad_norm": 1.373637080192566, "learning_rate": 8.742e-06, "loss": 0.0948, "step": 2917 }, { "epoch": 16.3943661971831, "grad_norm": 1.3039276599884033, "learning_rate": 8.745e-06, "loss": 0.151, "step": 2918 }, { "epoch": 16.4, "grad_norm": 0.7451199889183044, "learning_rate": 8.748000000000002e-06, "loss": 0.1152, "step": 2919 }, { "epoch": 16.4056338028169, "grad_norm": 0.5935131907463074, "learning_rate": 8.751000000000001e-06, "loss": 0.0833, "step": 2920 }, { "epoch": 16.411267605633803, "grad_norm": 0.6291968822479248, "learning_rate": 8.754e-06, "loss": 0.083, "step": 2921 }, { "epoch": 16.416901408450705, "grad_norm": 0.9987042546272278, "learning_rate": 8.757e-06, "loss": 0.0969, "step": 2922 }, { "epoch": 16.422535211267604, "grad_norm": 0.8387894034385681, "learning_rate": 8.759999999999999e-06, "loss": 0.1002, "step": 2923 }, { "epoch": 16.428169014084506, "grad_norm": 1.1793060302734375, "learning_rate": 8.763e-06, "loss": 0.1521, "step": 2924 }, { "epoch": 16.43380281690141, "grad_norm": 1.229485273361206, "learning_rate": 8.766e-06, "loss": 0.1172, "step": 2925 }, { "epoch": 16.43943661971831, "grad_norm": 0.6822589039802551, "learning_rate": 8.769e-06, "loss": 0.0967, "step": 2926 }, { "epoch": 16.44507042253521, "grad_norm": 0.5700380206108093, "learning_rate": 8.772e-06, "loss": 0.1021, "step": 2927 }, { "epoch": 16.450704225352112, 
"grad_norm": 0.7322880029678345, "learning_rate": 8.775e-06, "loss": 0.0979, "step": 2928 }, { "epoch": 16.456338028169014, "grad_norm": 1.0937310457229614, "learning_rate": 8.778000000000001e-06, "loss": 0.0739, "step": 2929 }, { "epoch": 16.461971830985917, "grad_norm": 0.8712027668952942, "learning_rate": 8.781000000000001e-06, "loss": 0.0895, "step": 2930 }, { "epoch": 16.467605633802815, "grad_norm": 0.8388264179229736, "learning_rate": 8.784000000000001e-06, "loss": 0.1329, "step": 2931 }, { "epoch": 16.473239436619718, "grad_norm": 0.867808997631073, "learning_rate": 8.787e-06, "loss": 0.1203, "step": 2932 }, { "epoch": 16.47887323943662, "grad_norm": 0.6732862591743469, "learning_rate": 8.79e-06, "loss": 0.0666, "step": 2933 }, { "epoch": 16.484507042253522, "grad_norm": 0.8210992217063904, "learning_rate": 8.793e-06, "loss": 0.1016, "step": 2934 }, { "epoch": 16.49014084507042, "grad_norm": 0.6380671262741089, "learning_rate": 8.796e-06, "loss": 0.0853, "step": 2935 }, { "epoch": 16.495774647887323, "grad_norm": 0.8860544562339783, "learning_rate": 8.799e-06, "loss": 0.0728, "step": 2936 }, { "epoch": 16.501408450704226, "grad_norm": 1.0764464139938354, "learning_rate": 8.802e-06, "loss": 0.2764, "step": 2937 }, { "epoch": 16.507042253521128, "grad_norm": 0.8818057179450989, "learning_rate": 8.805e-06, "loss": 0.218, "step": 2938 }, { "epoch": 16.512676056338027, "grad_norm": 0.9122177362442017, "learning_rate": 8.808000000000001e-06, "loss": 0.22, "step": 2939 }, { "epoch": 16.51830985915493, "grad_norm": 0.7297251224517822, "learning_rate": 8.811000000000001e-06, "loss": 0.2024, "step": 2940 }, { "epoch": 16.52394366197183, "grad_norm": 1.2650741338729858, "learning_rate": 8.814e-06, "loss": 0.1922, "step": 2941 }, { "epoch": 16.529577464788733, "grad_norm": 0.9265139102935791, "learning_rate": 8.817e-06, "loss": 0.227, "step": 2942 }, { "epoch": 16.535211267605632, "grad_norm": 0.7995975017547607, "learning_rate": 8.82e-06, "loss": 0.2172, "step": 2943 
}, { "epoch": 16.540845070422534, "grad_norm": 3.008139133453369, "learning_rate": 8.823e-06, "loss": 0.1773, "step": 2944 }, { "epoch": 16.546478873239437, "grad_norm": 0.7010889649391174, "learning_rate": 8.826000000000002e-06, "loss": 0.1514, "step": 2945 }, { "epoch": 16.55211267605634, "grad_norm": 0.6947098970413208, "learning_rate": 8.829e-06, "loss": 0.163, "step": 2946 }, { "epoch": 16.557746478873238, "grad_norm": 1.091259479522705, "learning_rate": 8.832e-06, "loss": 0.2004, "step": 2947 }, { "epoch": 16.56338028169014, "grad_norm": 0.7184098362922668, "learning_rate": 8.835e-06, "loss": 0.1655, "step": 2948 }, { "epoch": 16.569014084507042, "grad_norm": 0.911738932132721, "learning_rate": 8.837999999999999e-06, "loss": 0.1858, "step": 2949 }, { "epoch": 16.574647887323945, "grad_norm": 1.134534478187561, "learning_rate": 8.841e-06, "loss": 0.2336, "step": 2950 }, { "epoch": 16.580281690140843, "grad_norm": 0.6312000751495361, "learning_rate": 8.844e-06, "loss": 0.1271, "step": 2951 }, { "epoch": 16.585915492957746, "grad_norm": 0.860080361366272, "learning_rate": 8.847e-06, "loss": 0.1434, "step": 2952 }, { "epoch": 16.591549295774648, "grad_norm": 0.7159374952316284, "learning_rate": 8.85e-06, "loss": 0.099, "step": 2953 }, { "epoch": 16.59718309859155, "grad_norm": 0.8333531022071838, "learning_rate": 8.853e-06, "loss": 0.1498, "step": 2954 }, { "epoch": 16.60281690140845, "grad_norm": 0.758059024810791, "learning_rate": 8.856000000000001e-06, "loss": 0.1384, "step": 2955 }, { "epoch": 16.60845070422535, "grad_norm": 0.7951486110687256, "learning_rate": 8.859000000000001e-06, "loss": 0.132, "step": 2956 }, { "epoch": 16.614084507042254, "grad_norm": 0.6073364019393921, "learning_rate": 8.862000000000001e-06, "loss": 0.1027, "step": 2957 }, { "epoch": 16.619718309859156, "grad_norm": 1.1833101511001587, "learning_rate": 8.864999999999999e-06, "loss": 0.1419, "step": 2958 }, { "epoch": 16.625352112676055, "grad_norm": 0.8729153275489807, 
"learning_rate": 8.867999999999999e-06, "loss": 0.1384, "step": 2959 }, { "epoch": 16.630985915492957, "grad_norm": 0.8305640816688538, "learning_rate": 8.871e-06, "loss": 0.1437, "step": 2960 }, { "epoch": 16.63661971830986, "grad_norm": 0.7726940512657166, "learning_rate": 8.874e-06, "loss": 0.1121, "step": 2961 }, { "epoch": 16.64225352112676, "grad_norm": 0.6660939455032349, "learning_rate": 8.877e-06, "loss": 0.089, "step": 2962 }, { "epoch": 16.647887323943664, "grad_norm": 0.6047274470329285, "learning_rate": 8.88e-06, "loss": 0.1047, "step": 2963 }, { "epoch": 16.653521126760563, "grad_norm": 0.7954800724983215, "learning_rate": 8.883e-06, "loss": 0.1153, "step": 2964 }, { "epoch": 16.659154929577465, "grad_norm": 1.2811477184295654, "learning_rate": 8.886000000000001e-06, "loss": 0.1056, "step": 2965 }, { "epoch": 16.664788732394367, "grad_norm": 0.7188932299613953, "learning_rate": 8.889e-06, "loss": 0.0955, "step": 2966 }, { "epoch": 16.670422535211266, "grad_norm": 0.8470908999443054, "learning_rate": 8.892e-06, "loss": 0.1142, "step": 2967 }, { "epoch": 16.676056338028168, "grad_norm": 0.7260785698890686, "learning_rate": 8.895e-06, "loss": 0.0926, "step": 2968 }, { "epoch": 16.68169014084507, "grad_norm": 0.8436051607131958, "learning_rate": 8.898e-06, "loss": 0.0744, "step": 2969 }, { "epoch": 16.687323943661973, "grad_norm": 0.9941674470901489, "learning_rate": 8.901e-06, "loss": 0.1168, "step": 2970 }, { "epoch": 16.692957746478875, "grad_norm": 1.1118665933609009, "learning_rate": 8.904e-06, "loss": 0.1083, "step": 2971 }, { "epoch": 16.698591549295774, "grad_norm": 0.7850687503814697, "learning_rate": 8.907e-06, "loss": 0.0948, "step": 2972 }, { "epoch": 16.704225352112676, "grad_norm": 1.0018725395202637, "learning_rate": 8.91e-06, "loss": 0.1124, "step": 2973 }, { "epoch": 16.70985915492958, "grad_norm": 0.825855553150177, "learning_rate": 8.913e-06, "loss": 0.1274, "step": 2974 }, { "epoch": 16.71549295774648, "grad_norm": 0.5194077491760254, 
"learning_rate": 8.916e-06, "loss": 0.0548, "step": 2975 }, { "epoch": 16.72112676056338, "grad_norm": 0.6820377111434937, "learning_rate": 8.919e-06, "loss": 0.0709, "step": 2976 }, { "epoch": 16.72676056338028, "grad_norm": 0.6180262565612793, "learning_rate": 8.922e-06, "loss": 0.048, "step": 2977 }, { "epoch": 16.732394366197184, "grad_norm": 0.8627561330795288, "learning_rate": 8.925e-06, "loss": 0.1208, "step": 2978 }, { "epoch": 16.738028169014086, "grad_norm": 0.8664929866790771, "learning_rate": 8.928e-06, "loss": 0.0587, "step": 2979 }, { "epoch": 16.743661971830985, "grad_norm": 0.8254337906837463, "learning_rate": 8.931000000000001e-06, "loss": 0.0626, "step": 2980 }, { "epoch": 16.749295774647887, "grad_norm": 2.428992509841919, "learning_rate": 8.934000000000001e-06, "loss": 0.3544, "step": 2981 }, { "epoch": 16.75492957746479, "grad_norm": 1.1950771808624268, "learning_rate": 8.937000000000001e-06, "loss": 0.2874, "step": 2982 }, { "epoch": 16.760563380281692, "grad_norm": 1.0256483554840088, "learning_rate": 8.939999999999999e-06, "loss": 0.2223, "step": 2983 }, { "epoch": 16.76619718309859, "grad_norm": 1.142840027809143, "learning_rate": 8.942999999999999e-06, "loss": 0.3094, "step": 2984 }, { "epoch": 16.771830985915493, "grad_norm": 0.9689809083938599, "learning_rate": 8.946e-06, "loss": 0.2277, "step": 2985 }, { "epoch": 16.777464788732395, "grad_norm": 0.7120257019996643, "learning_rate": 8.949e-06, "loss": 0.2101, "step": 2986 }, { "epoch": 16.783098591549297, "grad_norm": 0.6611700654029846, "learning_rate": 8.952e-06, "loss": 0.155, "step": 2987 }, { "epoch": 16.788732394366196, "grad_norm": 0.8778490424156189, "learning_rate": 8.955e-06, "loss": 0.252, "step": 2988 }, { "epoch": 16.7943661971831, "grad_norm": 0.7665641903877258, "learning_rate": 8.958e-06, "loss": 0.1532, "step": 2989 }, { "epoch": 16.8, "grad_norm": 0.6630392074584961, "learning_rate": 8.961000000000001e-06, "loss": 0.1606, "step": 2990 }, { "epoch": 16.805633802816903, 
"grad_norm": 2.923142433166504, "learning_rate": 8.964000000000001e-06, "loss": 0.1828, "step": 2991 }, { "epoch": 16.8112676056338, "grad_norm": 0.6735742688179016, "learning_rate": 8.967e-06, "loss": 0.148, "step": 2992 }, { "epoch": 16.816901408450704, "grad_norm": 0.8473266363143921, "learning_rate": 8.97e-06, "loss": 0.1858, "step": 2993 }, { "epoch": 16.822535211267606, "grad_norm": 0.8506279587745667, "learning_rate": 8.973e-06, "loss": 0.1883, "step": 2994 }, { "epoch": 16.82816901408451, "grad_norm": 1.112320899963379, "learning_rate": 8.976e-06, "loss": 0.142, "step": 2995 }, { "epoch": 16.833802816901407, "grad_norm": 1.0212842226028442, "learning_rate": 8.979e-06, "loss": 0.1949, "step": 2996 }, { "epoch": 16.83943661971831, "grad_norm": 0.9373661875724792, "learning_rate": 8.982e-06, "loss": 0.1963, "step": 2997 }, { "epoch": 16.845070422535212, "grad_norm": 0.6234984993934631, "learning_rate": 8.985e-06, "loss": 0.1291, "step": 2998 }, { "epoch": 16.850704225352114, "grad_norm": 0.8292663097381592, "learning_rate": 8.988e-06, "loss": 0.1321, "step": 2999 }, { "epoch": 16.856338028169013, "grad_norm": 0.8266330361366272, "learning_rate": 8.991e-06, "loss": 0.1104, "step": 3000 }, { "epoch": 16.856338028169013, "eval_cer": 0.1181792259636571, "eval_loss": 0.34039416909217834, "eval_runtime": 16.126, "eval_samples_per_second": 18.852, "eval_steps_per_second": 0.62, "eval_wer": 0.41653875671527246, "step": 3000 }, { "epoch": 16.861971830985915, "grad_norm": 1.0222625732421875, "learning_rate": 8.994e-06, "loss": 0.1311, "step": 3001 }, { "epoch": 16.867605633802818, "grad_norm": 0.8006466031074524, "learning_rate": 8.997e-06, "loss": 0.1475, "step": 3002 }, { "epoch": 16.87323943661972, "grad_norm": 0.830461323261261, "learning_rate": 9e-06, "loss": 0.1081, "step": 3003 }, { "epoch": 16.87887323943662, "grad_norm": 0.6896873712539673, "learning_rate": 9.003e-06, "loss": 0.092, "step": 3004 }, { "epoch": 16.88450704225352, "grad_norm": 1.2886252403259277, 
"learning_rate": 9.006000000000002e-06, "loss": 0.1294, "step": 3005 }, { "epoch": 16.890140845070423, "grad_norm": 0.6785808801651001, "learning_rate": 9.009000000000001e-06, "loss": 0.0922, "step": 3006 }, { "epoch": 16.895774647887325, "grad_norm": 0.6470662355422974, "learning_rate": 9.012e-06, "loss": 0.0975, "step": 3007 }, { "epoch": 16.901408450704224, "grad_norm": 0.5824700593948364, "learning_rate": 9.015e-06, "loss": 0.0847, "step": 3008 }, { "epoch": 16.907042253521126, "grad_norm": 0.6603668332099915, "learning_rate": 9.017999999999999e-06, "loss": 0.0676, "step": 3009 }, { "epoch": 16.91267605633803, "grad_norm": 1.0298947095870972, "learning_rate": 9.021e-06, "loss": 0.2034, "step": 3010 }, { "epoch": 16.91830985915493, "grad_norm": 0.6118067502975464, "learning_rate": 9.024e-06, "loss": 0.0846, "step": 3011 }, { "epoch": 16.92394366197183, "grad_norm": 0.6924065351486206, "learning_rate": 9.027e-06, "loss": 0.0749, "step": 3012 }, { "epoch": 16.929577464788732, "grad_norm": 1.720041036605835, "learning_rate": 9.03e-06, "loss": 0.2113, "step": 3013 }, { "epoch": 16.935211267605634, "grad_norm": 1.0267527103424072, "learning_rate": 9.033e-06, "loss": 0.111, "step": 3014 }, { "epoch": 16.940845070422537, "grad_norm": 0.7971035242080688, "learning_rate": 9.036000000000001e-06, "loss": 0.0655, "step": 3015 }, { "epoch": 16.946478873239435, "grad_norm": 0.839490532875061, "learning_rate": 9.039000000000001e-06, "loss": 0.0822, "step": 3016 }, { "epoch": 16.952112676056338, "grad_norm": 0.7278625965118408, "learning_rate": 9.042e-06, "loss": 0.0957, "step": 3017 }, { "epoch": 16.95774647887324, "grad_norm": 1.1892837285995483, "learning_rate": 9.045e-06, "loss": 0.0665, "step": 3018 }, { "epoch": 16.963380281690142, "grad_norm": 1.006429672241211, "learning_rate": 9.048e-06, "loss": 0.0882, "step": 3019 }, { "epoch": 16.96901408450704, "grad_norm": 0.5892338156700134, "learning_rate": 9.051e-06, "loss": 0.0889, "step": 3020 }, { "epoch": 
16.974647887323943, "grad_norm": 0.9219954013824463, "learning_rate": 9.054e-06, "loss": 0.0717, "step": 3021 }, { "epoch": 16.980281690140846, "grad_norm": 1.3822382688522339, "learning_rate": 9.057e-06, "loss": 0.0845, "step": 3022 }, { "epoch": 16.985915492957748, "grad_norm": 0.8605322241783142, "learning_rate": 9.06e-06, "loss": 0.1097, "step": 3023 }, { "epoch": 16.991549295774647, "grad_norm": 2.076396942138672, "learning_rate": 9.063e-06, "loss": 0.1406, "step": 3024 }, { "epoch": 16.99718309859155, "grad_norm": 1.184332013130188, "learning_rate": 9.066000000000001e-06, "loss": 0.1704, "step": 3025 }, { "epoch": 17.0, "grad_norm": 0.4899880290031433, "learning_rate": 9.069e-06, "loss": 0.0283, "step": 3026 }, { "epoch": 17.005633802816902, "grad_norm": 4.838672637939453, "learning_rate": 9.072e-06, "loss": 0.2142, "step": 3027 }, { "epoch": 17.011267605633805, "grad_norm": 1.1314750909805298, "learning_rate": 9.075e-06, "loss": 0.2123, "step": 3028 }, { "epoch": 17.016901408450703, "grad_norm": 0.7342777848243713, "learning_rate": 9.078e-06, "loss": 0.2204, "step": 3029 }, { "epoch": 17.022535211267606, "grad_norm": 0.8950889706611633, "learning_rate": 9.081000000000002e-06, "loss": 0.2127, "step": 3030 }, { "epoch": 17.028169014084508, "grad_norm": 0.6780038475990295, "learning_rate": 9.084000000000001e-06, "loss": 0.1742, "step": 3031 }, { "epoch": 17.03380281690141, "grad_norm": 0.9547369480133057, "learning_rate": 9.087e-06, "loss": 0.2509, "step": 3032 }, { "epoch": 17.03943661971831, "grad_norm": 1.0978807210922241, "learning_rate": 9.09e-06, "loss": 0.2153, "step": 3033 }, { "epoch": 17.04507042253521, "grad_norm": 0.9788209795951843, "learning_rate": 9.093e-06, "loss": 0.2253, "step": 3034 }, { "epoch": 17.050704225352113, "grad_norm": 0.8464028835296631, "learning_rate": 9.096e-06, "loss": 0.1951, "step": 3035 }, { "epoch": 17.056338028169016, "grad_norm": 0.8530139923095703, "learning_rate": 9.099e-06, "loss": 0.1377, "step": 3036 }, { "epoch": 
17.061971830985915, "grad_norm": 0.9975249767303467, "learning_rate": 9.102e-06, "loss": 0.2222, "step": 3037 }, { "epoch": 17.067605633802817, "grad_norm": 0.7306218147277832, "learning_rate": 9.105e-06, "loss": 0.1791, "step": 3038 }, { "epoch": 17.07323943661972, "grad_norm": 0.766314685344696, "learning_rate": 9.108e-06, "loss": 0.1907, "step": 3039 }, { "epoch": 17.07887323943662, "grad_norm": 0.884260892868042, "learning_rate": 9.111000000000001e-06, "loss": 0.1104, "step": 3040 }, { "epoch": 17.08450704225352, "grad_norm": 0.585534393787384, "learning_rate": 9.114000000000001e-06, "loss": 0.1086, "step": 3041 }, { "epoch": 17.090140845070422, "grad_norm": 0.8080013990402222, "learning_rate": 9.117000000000001e-06, "loss": 0.1252, "step": 3042 }, { "epoch": 17.095774647887325, "grad_norm": 0.7447844743728638, "learning_rate": 9.12e-06, "loss": 0.1669, "step": 3043 }, { "epoch": 17.101408450704227, "grad_norm": 0.6644817590713501, "learning_rate": 9.122999999999999e-06, "loss": 0.1374, "step": 3044 }, { "epoch": 17.107042253521126, "grad_norm": 0.9485454559326172, "learning_rate": 9.126e-06, "loss": 0.1578, "step": 3045 }, { "epoch": 17.112676056338028, "grad_norm": 0.6433545351028442, "learning_rate": 9.129e-06, "loss": 0.098, "step": 3046 }, { "epoch": 17.11830985915493, "grad_norm": 1.0256527662277222, "learning_rate": 9.132e-06, "loss": 0.1189, "step": 3047 }, { "epoch": 17.123943661971833, "grad_norm": 0.6666250228881836, "learning_rate": 9.135e-06, "loss": 0.0987, "step": 3048 }, { "epoch": 17.12957746478873, "grad_norm": 1.0305372476577759, "learning_rate": 9.138e-06, "loss": 0.1401, "step": 3049 }, { "epoch": 17.135211267605634, "grad_norm": 1.0369501113891602, "learning_rate": 9.141000000000001e-06, "loss": 0.1453, "step": 3050 }, { "epoch": 17.140845070422536, "grad_norm": 0.8077501654624939, "learning_rate": 9.144000000000001e-06, "loss": 0.1185, "step": 3051 }, { "epoch": 17.146478873239438, "grad_norm": 0.7837086915969849, "learning_rate": 
9.147e-06, "loss": 0.1034, "step": 3052 }, { "epoch": 17.152112676056337, "grad_norm": 0.8186353445053101, "learning_rate": 9.15e-06, "loss": 0.0959, "step": 3053 }, { "epoch": 17.15774647887324, "grad_norm": 0.7710402607917786, "learning_rate": 9.153e-06, "loss": 0.0812, "step": 3054 }, { "epoch": 17.16338028169014, "grad_norm": 0.7395483255386353, "learning_rate": 9.156000000000002e-06, "loss": 0.0681, "step": 3055 }, { "epoch": 17.169014084507044, "grad_norm": 1.0596472024917603, "learning_rate": 9.159e-06, "loss": 0.1121, "step": 3056 }, { "epoch": 17.174647887323943, "grad_norm": 0.6450384855270386, "learning_rate": 9.162e-06, "loss": 0.0787, "step": 3057 }, { "epoch": 17.180281690140845, "grad_norm": 0.6624043583869934, "learning_rate": 9.165e-06, "loss": 0.0694, "step": 3058 }, { "epoch": 17.185915492957747, "grad_norm": 0.9088671207427979, "learning_rate": 9.168e-06, "loss": 0.0751, "step": 3059 }, { "epoch": 17.19154929577465, "grad_norm": 0.6026711463928223, "learning_rate": 9.171e-06, "loss": 0.0914, "step": 3060 }, { "epoch": 17.197183098591548, "grad_norm": 0.7043696641921997, "learning_rate": 9.174e-06, "loss": 0.0635, "step": 3061 }, { "epoch": 17.20281690140845, "grad_norm": 0.8514806628227234, "learning_rate": 9.177e-06, "loss": 0.1009, "step": 3062 }, { "epoch": 17.208450704225353, "grad_norm": 0.8971308469772339, "learning_rate": 9.18e-06, "loss": 0.0792, "step": 3063 }, { "epoch": 17.214084507042255, "grad_norm": 0.693361759185791, "learning_rate": 9.183e-06, "loss": 0.1078, "step": 3064 }, { "epoch": 17.219718309859154, "grad_norm": 0.812265157699585, "learning_rate": 9.186000000000001e-06, "loss": 0.0666, "step": 3065 }, { "epoch": 17.225352112676056, "grad_norm": 4.320266246795654, "learning_rate": 9.189000000000001e-06, "loss": 0.1129, "step": 3066 }, { "epoch": 17.23098591549296, "grad_norm": 0.6853795647621155, "learning_rate": 9.192000000000001e-06, "loss": 0.0647, "step": 3067 }, { "epoch": 17.23661971830986, "grad_norm": 
0.5804848074913025, "learning_rate": 9.195000000000001e-06, "loss": 0.0954, "step": 3068 }, { "epoch": 17.24225352112676, "grad_norm": 0.6321655511856079, "learning_rate": 9.197999999999999e-06, "loss": 0.0687, "step": 3069 }, { "epoch": 17.24788732394366, "grad_norm": 1.2159277200698853, "learning_rate": 9.200999999999999e-06, "loss": 0.1308, "step": 3070 }, { "epoch": 17.253521126760564, "grad_norm": 1.102030634880066, "learning_rate": 9.204e-06, "loss": 0.2314, "step": 3071 }, { "epoch": 17.259154929577466, "grad_norm": 1.386979579925537, "learning_rate": 9.207e-06, "loss": 0.2973, "step": 3072 }, { "epoch": 17.264788732394365, "grad_norm": 0.8723505139350891, "learning_rate": 9.21e-06, "loss": 0.234, "step": 3073 }, { "epoch": 17.270422535211267, "grad_norm": 0.8602634072303772, "learning_rate": 9.213e-06, "loss": 0.2506, "step": 3074 }, { "epoch": 17.27605633802817, "grad_norm": 0.7992596626281738, "learning_rate": 9.216e-06, "loss": 0.2369, "step": 3075 }, { "epoch": 17.281690140845072, "grad_norm": 1.1240075826644897, "learning_rate": 9.219000000000001e-06, "loss": 0.2027, "step": 3076 }, { "epoch": 17.28732394366197, "grad_norm": 0.856026828289032, "learning_rate": 9.222e-06, "loss": 0.2299, "step": 3077 }, { "epoch": 17.292957746478873, "grad_norm": 0.5991618037223816, "learning_rate": 9.225e-06, "loss": 0.1637, "step": 3078 }, { "epoch": 17.298591549295775, "grad_norm": 0.7233130931854248, "learning_rate": 9.228e-06, "loss": 0.1434, "step": 3079 }, { "epoch": 17.304225352112677, "grad_norm": 0.764525830745697, "learning_rate": 9.231e-06, "loss": 0.1741, "step": 3080 }, { "epoch": 17.309859154929576, "grad_norm": 0.7620739936828613, "learning_rate": 9.234e-06, "loss": 0.1912, "step": 3081 }, { "epoch": 17.31549295774648, "grad_norm": 0.837080180644989, "learning_rate": 9.237e-06, "loss": 0.1441, "step": 3082 }, { "epoch": 17.32112676056338, "grad_norm": 0.6409103274345398, "learning_rate": 9.24e-06, "loss": 0.1465, "step": 3083 }, { "epoch": 
17.326760563380283, "grad_norm": 0.8394469618797302, "learning_rate": 9.243e-06, "loss": 0.1688, "step": 3084 }, { "epoch": 17.33239436619718, "grad_norm": 1.0458965301513672, "learning_rate": 9.246e-06, "loss": 0.2189, "step": 3085 }, { "epoch": 17.338028169014084, "grad_norm": 0.6240298748016357, "learning_rate": 9.249e-06, "loss": 0.1231, "step": 3086 }, { "epoch": 17.343661971830986, "grad_norm": 0.8952569961547852, "learning_rate": 9.252e-06, "loss": 0.1108, "step": 3087 }, { "epoch": 17.34929577464789, "grad_norm": 0.5902819633483887, "learning_rate": 9.255e-06, "loss": 0.0781, "step": 3088 }, { "epoch": 17.354929577464787, "grad_norm": 0.6178258657455444, "learning_rate": 9.258e-06, "loss": 0.1205, "step": 3089 }, { "epoch": 17.36056338028169, "grad_norm": 0.6218576431274414, "learning_rate": 9.261e-06, "loss": 0.1032, "step": 3090 }, { "epoch": 17.366197183098592, "grad_norm": 0.7220010757446289, "learning_rate": 9.264000000000001e-06, "loss": 0.1061, "step": 3091 }, { "epoch": 17.371830985915494, "grad_norm": 0.7110081315040588, "learning_rate": 9.267000000000001e-06, "loss": 0.1245, "step": 3092 }, { "epoch": 17.377464788732393, "grad_norm": 0.710464596748352, "learning_rate": 9.27e-06, "loss": 0.0951, "step": 3093 }, { "epoch": 17.383098591549295, "grad_norm": 0.662023663520813, "learning_rate": 9.272999999999999e-06, "loss": 0.1087, "step": 3094 }, { "epoch": 17.388732394366198, "grad_norm": 0.6141768097877502, "learning_rate": 9.275999999999999e-06, "loss": 0.0812, "step": 3095 }, { "epoch": 17.3943661971831, "grad_norm": 0.6518242359161377, "learning_rate": 9.279e-06, "loss": 0.0968, "step": 3096 }, { "epoch": 17.4, "grad_norm": 0.6796634793281555, "learning_rate": 9.282e-06, "loss": 0.0937, "step": 3097 }, { "epoch": 17.4056338028169, "grad_norm": 0.9904537796974182, "learning_rate": 9.285e-06, "loss": 0.1046, "step": 3098 }, { "epoch": 17.411267605633803, "grad_norm": 1.0456186532974243, "learning_rate": 9.288e-06, "loss": 0.0993, "step": 3099 }, { 
"epoch": 17.416901408450705, "grad_norm": 0.8419276475906372, "learning_rate": 9.291e-06, "loss": 0.0925, "step": 3100 }, { "epoch": 17.422535211267604, "grad_norm": 0.8421168327331543, "learning_rate": 9.294000000000001e-06, "loss": 0.084, "step": 3101 }, { "epoch": 17.428169014084506, "grad_norm": 0.9186273813247681, "learning_rate": 9.297000000000001e-06, "loss": 0.0545, "step": 3102 }, { "epoch": 17.43380281690141, "grad_norm": 0.9772595763206482, "learning_rate": 9.3e-06, "loss": 0.1268, "step": 3103 }, { "epoch": 17.43943661971831, "grad_norm": 0.6360768675804138, "learning_rate": 9.303e-06, "loss": 0.1176, "step": 3104 }, { "epoch": 17.44507042253521, "grad_norm": 0.5123996138572693, "learning_rate": 9.306e-06, "loss": 0.0552, "step": 3105 }, { "epoch": 17.450704225352112, "grad_norm": 0.831864058971405, "learning_rate": 9.309e-06, "loss": 0.1003, "step": 3106 }, { "epoch": 17.456338028169014, "grad_norm": 0.7811830043792725, "learning_rate": 9.312e-06, "loss": 0.1183, "step": 3107 }, { "epoch": 17.461971830985917, "grad_norm": 0.7511170506477356, "learning_rate": 9.315e-06, "loss": 0.0718, "step": 3108 }, { "epoch": 17.467605633802815, "grad_norm": 1.3382049798965454, "learning_rate": 9.318e-06, "loss": 0.0664, "step": 3109 }, { "epoch": 17.473239436619718, "grad_norm": 0.7183825373649597, "learning_rate": 9.321e-06, "loss": 0.0627, "step": 3110 }, { "epoch": 17.47887323943662, "grad_norm": 0.804050862789154, "learning_rate": 9.324000000000001e-06, "loss": 0.0716, "step": 3111 }, { "epoch": 17.484507042253522, "grad_norm": 0.6093230843544006, "learning_rate": 9.327e-06, "loss": 0.0816, "step": 3112 }, { "epoch": 17.49014084507042, "grad_norm": 0.9843234419822693, "learning_rate": 9.33e-06, "loss": 0.1125, "step": 3113 }, { "epoch": 17.495774647887323, "grad_norm": 1.0596399307250977, "learning_rate": 9.333e-06, "loss": 0.0794, "step": 3114 }, { "epoch": 17.501408450704226, "grad_norm": 1.1668074131011963, "learning_rate": 9.336e-06, "loss": 0.307, "step": 
3115 }, { "epoch": 17.507042253521128, "grad_norm": 0.926954448223114, "learning_rate": 9.339000000000002e-06, "loss": 0.2268, "step": 3116 }, { "epoch": 17.512676056338027, "grad_norm": 0.9162659049034119, "learning_rate": 9.342000000000001e-06, "loss": 0.2239, "step": 3117 }, { "epoch": 17.51830985915493, "grad_norm": 1.073352575302124, "learning_rate": 9.345e-06, "loss": 0.1858, "step": 3118 }, { "epoch": 17.52394366197183, "grad_norm": 0.8319149017333984, "learning_rate": 9.348e-06, "loss": 0.2183, "step": 3119 }, { "epoch": 17.529577464788733, "grad_norm": 0.8098915219306946, "learning_rate": 9.350999999999999e-06, "loss": 0.1739, "step": 3120 }, { "epoch": 17.535211267605632, "grad_norm": 0.8036386370658875, "learning_rate": 9.354e-06, "loss": 0.1903, "step": 3121 }, { "epoch": 17.540845070422534, "grad_norm": 0.9611712694168091, "learning_rate": 9.357e-06, "loss": 0.2093, "step": 3122 }, { "epoch": 17.546478873239437, "grad_norm": 0.7244294285774231, "learning_rate": 9.36e-06, "loss": 0.1483, "step": 3123 }, { "epoch": 17.55211267605634, "grad_norm": 0.6682386994361877, "learning_rate": 9.363e-06, "loss": 0.1374, "step": 3124 }, { "epoch": 17.557746478873238, "grad_norm": 0.7490290999412537, "learning_rate": 9.366e-06, "loss": 0.1308, "step": 3125 }, { "epoch": 17.56338028169014, "grad_norm": 0.6992974281311035, "learning_rate": 9.369000000000001e-06, "loss": 0.1663, "step": 3126 }, { "epoch": 17.569014084507042, "grad_norm": 0.9411060214042664, "learning_rate": 9.372000000000001e-06, "loss": 0.1854, "step": 3127 }, { "epoch": 17.574647887323945, "grad_norm": 0.8281729817390442, "learning_rate": 9.375000000000001e-06, "loss": 0.1534, "step": 3128 }, { "epoch": 17.580281690140843, "grad_norm": 0.9498131275177002, "learning_rate": 9.378e-06, "loss": 0.2074, "step": 3129 }, { "epoch": 17.585915492957746, "grad_norm": 0.8109148740768433, "learning_rate": 9.380999999999999e-06, "loss": 0.1397, "step": 3130 }, { "epoch": 17.591549295774648, "grad_norm": 
0.756504237651825, "learning_rate": 9.384e-06, "loss": 0.111, "step": 3131 }, { "epoch": 17.59718309859155, "grad_norm": 0.7364681363105774, "learning_rate": 9.387e-06, "loss": 0.1114, "step": 3132 }, { "epoch": 17.60281690140845, "grad_norm": 0.6548569798469543, "learning_rate": 9.39e-06, "loss": 0.1021, "step": 3133 }, { "epoch": 17.60845070422535, "grad_norm": 0.8425253033638, "learning_rate": 9.393e-06, "loss": 0.1315, "step": 3134 }, { "epoch": 17.614084507042254, "grad_norm": 0.7352956533432007, "learning_rate": 9.396e-06, "loss": 0.1492, "step": 3135 }, { "epoch": 17.619718309859156, "grad_norm": 0.9405946731567383, "learning_rate": 9.399000000000001e-06, "loss": 0.1347, "step": 3136 }, { "epoch": 17.625352112676055, "grad_norm": 1.8065040111541748, "learning_rate": 9.402e-06, "loss": 0.1229, "step": 3137 }, { "epoch": 17.630985915492957, "grad_norm": 1.0555658340454102, "learning_rate": 9.405e-06, "loss": 0.0839, "step": 3138 }, { "epoch": 17.63661971830986, "grad_norm": 0.5702683925628662, "learning_rate": 9.408e-06, "loss": 0.089, "step": 3139 }, { "epoch": 17.64225352112676, "grad_norm": 1.0419154167175293, "learning_rate": 9.411e-06, "loss": 0.1016, "step": 3140 }, { "epoch": 17.647887323943664, "grad_norm": 1.39485764503479, "learning_rate": 9.414000000000002e-06, "loss": 0.1388, "step": 3141 }, { "epoch": 17.653521126760563, "grad_norm": 0.5333805680274963, "learning_rate": 9.417e-06, "loss": 0.0752, "step": 3142 }, { "epoch": 17.659154929577465, "grad_norm": 0.810979962348938, "learning_rate": 9.42e-06, "loss": 0.096, "step": 3143 }, { "epoch": 17.664788732394367, "grad_norm": 0.6484115719795227, "learning_rate": 9.423e-06, "loss": 0.1124, "step": 3144 }, { "epoch": 17.670422535211266, "grad_norm": 0.5857322812080383, "learning_rate": 9.426e-06, "loss": 0.0544, "step": 3145 }, { "epoch": 17.676056338028168, "grad_norm": 0.7417637705802917, "learning_rate": 9.429e-06, "loss": 0.1129, "step": 3146 }, { "epoch": 17.68169014084507, "grad_norm": 
0.8847521543502808, "learning_rate": 9.432e-06, "loss": 0.1222, "step": 3147 }, { "epoch": 17.687323943661973, "grad_norm": 0.7747939229011536, "learning_rate": 9.435e-06, "loss": 0.0907, "step": 3148 }, { "epoch": 17.692957746478875, "grad_norm": 0.612285852432251, "learning_rate": 9.438e-06, "loss": 0.0511, "step": 3149 }, { "epoch": 17.698591549295774, "grad_norm": 1.0581252574920654, "learning_rate": 9.441e-06, "loss": 0.0899, "step": 3150 }, { "epoch": 17.704225352112676, "grad_norm": 0.7872751355171204, "learning_rate": 9.444000000000001e-06, "loss": 0.0909, "step": 3151 }, { "epoch": 17.70985915492958, "grad_norm": 0.6201579570770264, "learning_rate": 9.447000000000001e-06, "loss": 0.0663, "step": 3152 }, { "epoch": 17.71549295774648, "grad_norm": 1.629888653755188, "learning_rate": 9.450000000000001e-06, "loss": 0.1102, "step": 3153 }, { "epoch": 17.72112676056338, "grad_norm": 0.751798689365387, "learning_rate": 9.453e-06, "loss": 0.1226, "step": 3154 }, { "epoch": 17.72676056338028, "grad_norm": 0.6360242962837219, "learning_rate": 9.455999999999999e-06, "loss": 0.0523, "step": 3155 }, { "epoch": 17.732394366197184, "grad_norm": 0.7026553153991699, "learning_rate": 9.459e-06, "loss": 0.0543, "step": 3156 }, { "epoch": 17.738028169014086, "grad_norm": 1.1090770959854126, "learning_rate": 9.462e-06, "loss": 0.1296, "step": 3157 }, { "epoch": 17.743661971830985, "grad_norm": 0.6803451776504517, "learning_rate": 9.465e-06, "loss": 0.062, "step": 3158 }, { "epoch": 17.749295774647887, "grad_norm": 0.9780817627906799, "learning_rate": 9.468e-06, "loss": 0.3037, "step": 3159 }, { "epoch": 17.75492957746479, "grad_norm": 0.999480128288269, "learning_rate": 9.471e-06, "loss": 0.2432, "step": 3160 }, { "epoch": 17.760563380281692, "grad_norm": 1.3164094686508179, "learning_rate": 9.474000000000001e-06, "loss": 0.2227, "step": 3161 }, { "epoch": 17.76619718309859, "grad_norm": 0.888710618019104, "learning_rate": 9.477000000000001e-06, "loss": 0.2417, "step": 3162 }, 
{ "epoch": 17.771830985915493, "grad_norm": 1.0191129446029663, "learning_rate": 9.48e-06, "loss": 0.2182, "step": 3163 }, { "epoch": 17.777464788732395, "grad_norm": 1.0171465873718262, "learning_rate": 9.483e-06, "loss": 0.1728, "step": 3164 }, { "epoch": 17.783098591549297, "grad_norm": 1.0090699195861816, "learning_rate": 9.486e-06, "loss": 0.1972, "step": 3165 }, { "epoch": 17.788732394366196, "grad_norm": 0.9489573836326599, "learning_rate": 9.489000000000002e-06, "loss": 0.2113, "step": 3166 }, { "epoch": 17.7943661971831, "grad_norm": 1.034713625907898, "learning_rate": 9.492e-06, "loss": 0.1687, "step": 3167 }, { "epoch": 17.8, "grad_norm": 0.7751023173332214, "learning_rate": 9.495e-06, "loss": 0.1813, "step": 3168 }, { "epoch": 17.805633802816903, "grad_norm": 0.7371370196342468, "learning_rate": 9.498e-06, "loss": 0.1589, "step": 3169 }, { "epoch": 17.8112676056338, "grad_norm": 1.1365973949432373, "learning_rate": 9.501e-06, "loss": 0.1432, "step": 3170 }, { "epoch": 17.816901408450704, "grad_norm": 0.9198344349861145, "learning_rate": 9.504e-06, "loss": 0.1587, "step": 3171 }, { "epoch": 17.822535211267606, "grad_norm": 1.3550646305084229, "learning_rate": 9.507e-06, "loss": 0.1578, "step": 3172 }, { "epoch": 17.82816901408451, "grad_norm": 1.2160855531692505, "learning_rate": 9.51e-06, "loss": 0.145, "step": 3173 }, { "epoch": 17.833802816901407, "grad_norm": 1.1365647315979004, "learning_rate": 9.513e-06, "loss": 0.1665, "step": 3174 }, { "epoch": 17.83943661971831, "grad_norm": 0.701583743095398, "learning_rate": 9.516e-06, "loss": 0.1263, "step": 3175 }, { "epoch": 17.845070422535212, "grad_norm": 6.048886299133301, "learning_rate": 9.519000000000002e-06, "loss": 0.1485, "step": 3176 }, { "epoch": 17.850704225352114, "grad_norm": 1.6768730878829956, "learning_rate": 9.522000000000001e-06, "loss": 0.0841, "step": 3177 }, { "epoch": 17.856338028169013, "grad_norm": 0.801356315612793, "learning_rate": 9.525000000000001e-06, "loss": 0.1463, "step": 
3178 }, { "epoch": 17.861971830985915, "grad_norm": 0.8703104853630066, "learning_rate": 9.528e-06, "loss": 0.1502, "step": 3179 }, { "epoch": 17.867605633802818, "grad_norm": 0.874949038028717, "learning_rate": 9.530999999999999e-06, "loss": 0.1455, "step": 3180 }, { "epoch": 17.87323943661972, "grad_norm": 0.7059535980224609, "learning_rate": 9.534e-06, "loss": 0.0998, "step": 3181 }, { "epoch": 17.87887323943662, "grad_norm": 0.7792074084281921, "learning_rate": 9.537e-06, "loss": 0.1012, "step": 3182 }, { "epoch": 17.88450704225352, "grad_norm": 0.9083050489425659, "learning_rate": 9.54e-06, "loss": 0.1156, "step": 3183 }, { "epoch": 17.890140845070423, "grad_norm": 1.0374540090560913, "learning_rate": 9.543e-06, "loss": 0.1365, "step": 3184 }, { "epoch": 17.895774647887325, "grad_norm": 0.8366495370864868, "learning_rate": 9.546e-06, "loss": 0.0957, "step": 3185 }, { "epoch": 17.901408450704224, "grad_norm": 0.8457148671150208, "learning_rate": 9.549000000000001e-06, "loss": 0.0873, "step": 3186 }, { "epoch": 17.907042253521126, "grad_norm": 1.1334501504898071, "learning_rate": 9.552000000000001e-06, "loss": 0.101, "step": 3187 }, { "epoch": 17.91267605633803, "grad_norm": 0.7135744094848633, "learning_rate": 9.555e-06, "loss": 0.1482, "step": 3188 }, { "epoch": 17.91830985915493, "grad_norm": 0.8089808821678162, "learning_rate": 9.558e-06, "loss": 0.0823, "step": 3189 }, { "epoch": 17.92394366197183, "grad_norm": 1.3953118324279785, "learning_rate": 9.561e-06, "loss": 0.1871, "step": 3190 }, { "epoch": 17.929577464788732, "grad_norm": 0.7169233560562134, "learning_rate": 9.564e-06, "loss": 0.0685, "step": 3191 }, { "epoch": 17.935211267605634, "grad_norm": 0.9937434196472168, "learning_rate": 9.567e-06, "loss": 0.1426, "step": 3192 }, { "epoch": 17.940845070422537, "grad_norm": 0.7215182185173035, "learning_rate": 9.57e-06, "loss": 0.1106, "step": 3193 }, { "epoch": 17.946478873239435, "grad_norm": 0.7243179678916931, "learning_rate": 9.573e-06, "loss": 
0.0965, "step": 3194 }, { "epoch": 17.952112676056338, "grad_norm": 0.6793733835220337, "learning_rate": 9.576e-06, "loss": 0.0697, "step": 3195 }, { "epoch": 17.95774647887324, "grad_norm": 0.7866185307502747, "learning_rate": 9.579e-06, "loss": 0.0856, "step": 3196 }, { "epoch": 17.963380281690142, "grad_norm": 0.7419630289077759, "learning_rate": 9.582e-06, "loss": 0.1204, "step": 3197 }, { "epoch": 17.96901408450704, "grad_norm": 0.7434686422348022, "learning_rate": 9.585e-06, "loss": 0.0783, "step": 3198 }, { "epoch": 17.974647887323943, "grad_norm": 0.5718957781791687, "learning_rate": 9.588e-06, "loss": 0.0676, "step": 3199 }, { "epoch": 17.980281690140846, "grad_norm": 0.5486510992050171, "learning_rate": 9.591e-06, "loss": 0.0378, "step": 3200 }, { "epoch": 17.985915492957748, "grad_norm": 0.7870190143585205, "learning_rate": 9.594e-06, "loss": 0.0542, "step": 3201 }, { "epoch": 17.991549295774647, "grad_norm": 2.622627019882202, "learning_rate": 9.597000000000001e-06, "loss": 0.06, "step": 3202 }, { "epoch": 17.99718309859155, "grad_norm": 0.7754027247428894, "learning_rate": 9.600000000000001e-06, "loss": 0.1476, "step": 3203 }, { "epoch": 18.0, "grad_norm": 0.5241531729698181, "learning_rate": 9.603e-06, "loss": 0.0323, "step": 3204 }, { "epoch": 18.005633802816902, "grad_norm": 1.269458293914795, "learning_rate": 9.606e-06, "loss": 0.3059, "step": 3205 }, { "epoch": 18.011267605633805, "grad_norm": 0.7945778369903564, "learning_rate": 9.608999999999999e-06, "loss": 0.2066, "step": 3206 }, { "epoch": 18.016901408450703, "grad_norm": 0.8700370192527771, "learning_rate": 9.612e-06, "loss": 0.2276, "step": 3207 }, { "epoch": 18.022535211267606, "grad_norm": 0.7708240151405334, "learning_rate": 9.615e-06, "loss": 0.1701, "step": 3208 }, { "epoch": 18.028169014084508, "grad_norm": 1.592963695526123, "learning_rate": 9.618e-06, "loss": 0.165, "step": 3209 }, { "epoch": 18.03380281690141, "grad_norm": 0.8360375761985779, "learning_rate": 9.621e-06, "loss": 
0.2075, "step": 3210 }, { "epoch": 18.03943661971831, "grad_norm": 0.6861481070518494, "learning_rate": 9.624e-06, "loss": 0.1527, "step": 3211 }, { "epoch": 18.04507042253521, "grad_norm": 0.8290767669677734, "learning_rate": 9.627000000000001e-06, "loss": 0.2261, "step": 3212 }, { "epoch": 18.050704225352113, "grad_norm": 0.9234678745269775, "learning_rate": 9.630000000000001e-06, "loss": 0.1878, "step": 3213 }, { "epoch": 18.056338028169016, "grad_norm": 0.6841479539871216, "learning_rate": 9.633e-06, "loss": 0.1165, "step": 3214 }, { "epoch": 18.061971830985915, "grad_norm": 0.7538576126098633, "learning_rate": 9.636e-06, "loss": 0.1702, "step": 3215 }, { "epoch": 18.067605633802817, "grad_norm": 0.815049409866333, "learning_rate": 9.638999999999999e-06, "loss": 0.1535, "step": 3216 }, { "epoch": 18.07323943661972, "grad_norm": 0.9226198196411133, "learning_rate": 9.642e-06, "loss": 0.1697, "step": 3217 }, { "epoch": 18.07887323943662, "grad_norm": 1.0882232189178467, "learning_rate": 9.645e-06, "loss": 0.1779, "step": 3218 }, { "epoch": 18.08450704225352, "grad_norm": 0.6835092306137085, "learning_rate": 9.648e-06, "loss": 0.1152, "step": 3219 }, { "epoch": 18.090140845070422, "grad_norm": 0.6113819479942322, "learning_rate": 9.651e-06, "loss": 0.1323, "step": 3220 }, { "epoch": 18.095774647887325, "grad_norm": 0.7105637788772583, "learning_rate": 9.654e-06, "loss": 0.1001, "step": 3221 }, { "epoch": 18.101408450704227, "grad_norm": 1.5997202396392822, "learning_rate": 9.657000000000001e-06, "loss": 0.1319, "step": 3222 }, { "epoch": 18.107042253521126, "grad_norm": 0.5694540143013, "learning_rate": 9.66e-06, "loss": 0.0856, "step": 3223 }, { "epoch": 18.112676056338028, "grad_norm": 0.8081299066543579, "learning_rate": 9.663e-06, "loss": 0.1023, "step": 3224 }, { "epoch": 18.11830985915493, "grad_norm": 0.7110785245895386, "learning_rate": 9.666e-06, "loss": 0.1179, "step": 3225 }, { "epoch": 18.123943661971833, "grad_norm": 0.8305751085281372, 
"learning_rate": 9.669e-06, "loss": 0.1173, "step": 3226 }, { "epoch": 18.12957746478873, "grad_norm": 0.818979024887085, "learning_rate": 9.672000000000002e-06, "loss": 0.1014, "step": 3227 }, { "epoch": 18.135211267605634, "grad_norm": 0.765323281288147, "learning_rate": 9.675e-06, "loss": 0.096, "step": 3228 }, { "epoch": 18.140845070422536, "grad_norm": 0.703900158405304, "learning_rate": 9.678e-06, "loss": 0.0922, "step": 3229 }, { "epoch": 18.146478873239438, "grad_norm": 1.7893223762512207, "learning_rate": 9.681e-06, "loss": 0.0984, "step": 3230 }, { "epoch": 18.152112676056337, "grad_norm": 0.9573982357978821, "learning_rate": 9.683999999999999e-06, "loss": 0.1035, "step": 3231 }, { "epoch": 18.15774647887324, "grad_norm": 0.9057571291923523, "learning_rate": 9.687e-06, "loss": 0.0951, "step": 3232 }, { "epoch": 18.16338028169014, "grad_norm": 0.6090024709701538, "learning_rate": 9.69e-06, "loss": 0.0636, "step": 3233 }, { "epoch": 18.169014084507044, "grad_norm": 0.7713419795036316, "learning_rate": 9.693e-06, "loss": 0.1046, "step": 3234 }, { "epoch": 18.174647887323943, "grad_norm": 2.731121301651001, "learning_rate": 9.696e-06, "loss": 0.084, "step": 3235 }, { "epoch": 18.180281690140845, "grad_norm": 0.7414133548736572, "learning_rate": 9.699e-06, "loss": 0.1068, "step": 3236 }, { "epoch": 18.185915492957747, "grad_norm": 0.681007444858551, "learning_rate": 9.702000000000001e-06, "loss": 0.0826, "step": 3237 }, { "epoch": 18.19154929577465, "grad_norm": 0.6044124960899353, "learning_rate": 9.705000000000001e-06, "loss": 0.1078, "step": 3238 }, { "epoch": 18.197183098591548, "grad_norm": 0.5845127105712891, "learning_rate": 9.708000000000001e-06, "loss": 0.1198, "step": 3239 }, { "epoch": 18.20281690140845, "grad_norm": 2.188575029373169, "learning_rate": 9.711e-06, "loss": 0.1129, "step": 3240 }, { "epoch": 18.208450704225353, "grad_norm": 0.6723904013633728, "learning_rate": 9.713999999999999e-06, "loss": 0.0873, "step": 3241 }, { "epoch": 
18.214084507042255, "grad_norm": 0.6954381465911865, "learning_rate": 9.717e-06, "loss": 0.0487, "step": 3242 }, { "epoch": 18.219718309859154, "grad_norm": 0.8770313262939453, "learning_rate": 9.72e-06, "loss": 0.0765, "step": 3243 }, { "epoch": 18.225352112676056, "grad_norm": 0.7259297370910645, "learning_rate": 9.723e-06, "loss": 0.0921, "step": 3244 }, { "epoch": 18.23098591549296, "grad_norm": 0.6169613003730774, "learning_rate": 9.726e-06, "loss": 0.0463, "step": 3245 }, { "epoch": 18.23661971830986, "grad_norm": 0.7343481779098511, "learning_rate": 9.729e-06, "loss": 0.0583, "step": 3246 }, { "epoch": 18.24225352112676, "grad_norm": 1.1153725385665894, "learning_rate": 9.732000000000001e-06, "loss": 0.0727, "step": 3247 }, { "epoch": 18.24788732394366, "grad_norm": 0.8395434617996216, "learning_rate": 9.735e-06, "loss": 0.0782, "step": 3248 }, { "epoch": 18.253521126760564, "grad_norm": 1.3611925840377808, "learning_rate": 9.738e-06, "loss": 0.2538, "step": 3249 }, { "epoch": 18.259154929577466, "grad_norm": 1.4467244148254395, "learning_rate": 9.741e-06, "loss": 0.2469, "step": 3250 }, { "epoch": 18.264788732394365, "grad_norm": 0.7944283485412598, "learning_rate": 9.744e-06, "loss": 0.2456, "step": 3251 }, { "epoch": 18.270422535211267, "grad_norm": 0.7862672805786133, "learning_rate": 9.747000000000002e-06, "loss": 0.1978, "step": 3252 }, { "epoch": 18.27605633802817, "grad_norm": 0.950491726398468, "learning_rate": 9.75e-06, "loss": 0.1834, "step": 3253 }, { "epoch": 18.281690140845072, "grad_norm": 1.0952248573303223, "learning_rate": 9.753e-06, "loss": 0.1968, "step": 3254 }, { "epoch": 18.28732394366197, "grad_norm": 1.1655362844467163, "learning_rate": 9.756e-06, "loss": 0.1951, "step": 3255 }, { "epoch": 18.292957746478873, "grad_norm": 0.6711989641189575, "learning_rate": 9.759e-06, "loss": 0.1292, "step": 3256 }, { "epoch": 18.298591549295775, "grad_norm": 0.7248563170433044, "learning_rate": 9.762e-06, "loss": 0.2123, "step": 3257 }, { "epoch": 
18.304225352112677, "grad_norm": 0.6906742453575134, "learning_rate": 9.765e-06, "loss": 0.1426, "step": 3258 }, { "epoch": 18.309859154929576, "grad_norm": 0.7631933689117432, "learning_rate": 9.768e-06, "loss": 0.1723, "step": 3259 }, { "epoch": 18.31549295774648, "grad_norm": 0.8643407225608826, "learning_rate": 9.771e-06, "loss": 0.1483, "step": 3260 }, { "epoch": 18.32112676056338, "grad_norm": 0.7693324089050293, "learning_rate": 9.774e-06, "loss": 0.1754, "step": 3261 }, { "epoch": 18.326760563380283, "grad_norm": 0.9951642155647278, "learning_rate": 9.777000000000001e-06, "loss": 0.1746, "step": 3262 }, { "epoch": 18.33239436619718, "grad_norm": 0.7490482926368713, "learning_rate": 9.780000000000001e-06, "loss": 0.1733, "step": 3263 }, { "epoch": 18.338028169014084, "grad_norm": 0.634590744972229, "learning_rate": 9.783000000000001e-06, "loss": 0.1274, "step": 3264 }, { "epoch": 18.343661971830986, "grad_norm": 0.6835691332817078, "learning_rate": 9.785999999999999e-06, "loss": 0.0816, "step": 3265 }, { "epoch": 18.34929577464789, "grad_norm": 0.8644260168075562, "learning_rate": 9.788999999999999e-06, "loss": 0.1481, "step": 3266 }, { "epoch": 18.354929577464787, "grad_norm": 0.6261890530586243, "learning_rate": 9.792e-06, "loss": 0.0947, "step": 3267 }, { "epoch": 18.36056338028169, "grad_norm": 0.6120619773864746, "learning_rate": 9.795e-06, "loss": 0.0858, "step": 3268 }, { "epoch": 18.366197183098592, "grad_norm": 0.7350469827651978, "learning_rate": 9.798e-06, "loss": 0.1204, "step": 3269 }, { "epoch": 18.371830985915494, "grad_norm": 0.7943240404129028, "learning_rate": 9.801e-06, "loss": 0.1141, "step": 3270 }, { "epoch": 18.377464788732393, "grad_norm": 1.40141761302948, "learning_rate": 9.804e-06, "loss": 0.0983, "step": 3271 }, { "epoch": 18.383098591549295, "grad_norm": 0.7233126163482666, "learning_rate": 9.807000000000001e-06, "loss": 0.0787, "step": 3272 }, { "epoch": 18.388732394366198, "grad_norm": 0.7157315611839294, "learning_rate": 
9.810000000000001e-06, "loss": 0.1415, "step": 3273 }, { "epoch": 18.3943661971831, "grad_norm": 0.5907537341117859, "learning_rate": 9.813e-06, "loss": 0.0895, "step": 3274 }, { "epoch": 18.4, "grad_norm": 0.535987913608551, "learning_rate": 9.816e-06, "loss": 0.0879, "step": 3275 }, { "epoch": 18.4056338028169, "grad_norm": 0.7179861664772034, "learning_rate": 9.819e-06, "loss": 0.0669, "step": 3276 }, { "epoch": 18.411267605633803, "grad_norm": 0.8069932460784912, "learning_rate": 9.822e-06, "loss": 0.0653, "step": 3277 }, { "epoch": 18.416901408450705, "grad_norm": 0.665037989616394, "learning_rate": 9.825e-06, "loss": 0.0921, "step": 3278 }, { "epoch": 18.422535211267604, "grad_norm": 0.6428496241569519, "learning_rate": 9.828e-06, "loss": 0.1002, "step": 3279 }, { "epoch": 18.428169014084506, "grad_norm": 0.7896534204483032, "learning_rate": 9.831e-06, "loss": 0.0632, "step": 3280 }, { "epoch": 18.43380281690141, "grad_norm": 0.8997613191604614, "learning_rate": 9.834e-06, "loss": 0.1001, "step": 3281 }, { "epoch": 18.43943661971831, "grad_norm": 0.7366743683815002, "learning_rate": 9.837000000000001e-06, "loss": 0.0672, "step": 3282 }, { "epoch": 18.44507042253521, "grad_norm": 0.5513707995414734, "learning_rate": 9.84e-06, "loss": 0.0607, "step": 3283 }, { "epoch": 18.450704225352112, "grad_norm": 0.8257274031639099, "learning_rate": 9.843e-06, "loss": 0.0726, "step": 3284 }, { "epoch": 18.456338028169014, "grad_norm": 0.8795554637908936, "learning_rate": 9.846e-06, "loss": 0.0887, "step": 3285 }, { "epoch": 18.461971830985917, "grad_norm": 0.5398402214050293, "learning_rate": 9.849e-06, "loss": 0.0913, "step": 3286 }, { "epoch": 18.467605633802815, "grad_norm": 0.7293025255203247, "learning_rate": 9.852000000000002e-06, "loss": 0.0586, "step": 3287 }, { "epoch": 18.473239436619718, "grad_norm": 0.7090908288955688, "learning_rate": 9.855000000000001e-06, "loss": 0.1025, "step": 3288 }, { "epoch": 18.47887323943662, "grad_norm": 0.7053910493850708, 
"learning_rate": 9.858000000000001e-06, "loss": 0.0719, "step": 3289 }, { "epoch": 18.484507042253522, "grad_norm": 0.7725375294685364, "learning_rate": 9.861e-06, "loss": 0.052, "step": 3290 }, { "epoch": 18.49014084507042, "grad_norm": 0.8807919025421143, "learning_rate": 9.863999999999999e-06, "loss": 0.0588, "step": 3291 }, { "epoch": 18.495774647887323, "grad_norm": 1.027423620223999, "learning_rate": 9.867e-06, "loss": 0.0493, "step": 3292 }, { "epoch": 18.501408450704226, "grad_norm": 2.2779431343078613, "learning_rate": 9.87e-06, "loss": 0.3041, "step": 3293 }, { "epoch": 18.507042253521128, "grad_norm": 0.870711624622345, "learning_rate": 9.873e-06, "loss": 0.2134, "step": 3294 }, { "epoch": 18.512676056338027, "grad_norm": 1.37291419506073, "learning_rate": 9.876e-06, "loss": 0.1994, "step": 3295 }, { "epoch": 18.51830985915493, "grad_norm": 1.1035975217819214, "learning_rate": 9.879e-06, "loss": 0.2666, "step": 3296 }, { "epoch": 18.52394366197183, "grad_norm": 0.8429668545722961, "learning_rate": 9.882000000000001e-06, "loss": 0.1864, "step": 3297 }, { "epoch": 18.529577464788733, "grad_norm": 0.8311285376548767, "learning_rate": 9.885000000000001e-06, "loss": 0.1719, "step": 3298 }, { "epoch": 18.535211267605632, "grad_norm": 0.6793307662010193, "learning_rate": 9.888000000000001e-06, "loss": 0.1522, "step": 3299 }, { "epoch": 18.540845070422534, "grad_norm": 0.7280133366584778, "learning_rate": 9.891e-06, "loss": 0.1948, "step": 3300 }, { "epoch": 18.546478873239437, "grad_norm": 0.7058970332145691, "learning_rate": 9.894e-06, "loss": 0.166, "step": 3301 }, { "epoch": 18.55211267605634, "grad_norm": 1.2566550970077515, "learning_rate": 9.897e-06, "loss": 0.1687, "step": 3302 }, { "epoch": 18.557746478873238, "grad_norm": 0.7633759379386902, "learning_rate": 9.9e-06, "loss": 0.1841, "step": 3303 }, { "epoch": 18.56338028169014, "grad_norm": 0.6372610926628113, "learning_rate": 9.903e-06, "loss": 0.1246, "step": 3304 }, { "epoch": 18.569014084507042, 
"grad_norm": 0.7474398612976074, "learning_rate": 9.906e-06, "loss": 0.183, "step": 3305 }, { "epoch": 18.574647887323945, "grad_norm": 0.6494360566139221, "learning_rate": 9.909e-06, "loss": 0.1107, "step": 3306 }, { "epoch": 18.580281690140843, "grad_norm": 0.6335179209709167, "learning_rate": 9.912000000000001e-06, "loss": 0.1112, "step": 3307 }, { "epoch": 18.585915492957746, "grad_norm": 0.7398079037666321, "learning_rate": 9.915e-06, "loss": 0.1557, "step": 3308 }, { "epoch": 18.591549295774648, "grad_norm": 0.6798459887504578, "learning_rate": 9.918e-06, "loss": 0.1024, "step": 3309 }, { "epoch": 18.59718309859155, "grad_norm": 0.5170331001281738, "learning_rate": 9.921e-06, "loss": 0.1116, "step": 3310 }, { "epoch": 18.60281690140845, "grad_norm": 0.8865971565246582, "learning_rate": 9.924e-06, "loss": 0.1407, "step": 3311 }, { "epoch": 18.60845070422535, "grad_norm": 0.5804907083511353, "learning_rate": 9.927000000000002e-06, "loss": 0.0803, "step": 3312 }, { "epoch": 18.614084507042254, "grad_norm": 0.8981394171714783, "learning_rate": 9.930000000000001e-06, "loss": 0.1658, "step": 3313 }, { "epoch": 18.619718309859156, "grad_norm": 0.8451980948448181, "learning_rate": 9.933e-06, "loss": 0.0973, "step": 3314 }, { "epoch": 18.625352112676055, "grad_norm": 1.0557599067687988, "learning_rate": 9.936e-06, "loss": 0.1482, "step": 3315 }, { "epoch": 18.630985915492957, "grad_norm": 0.5777811408042908, "learning_rate": 9.939e-06, "loss": 0.0946, "step": 3316 }, { "epoch": 18.63661971830986, "grad_norm": 0.8326600790023804, "learning_rate": 9.941999999999999e-06, "loss": 0.0893, "step": 3317 }, { "epoch": 18.64225352112676, "grad_norm": 0.6939487457275391, "learning_rate": 9.945e-06, "loss": 0.0753, "step": 3318 }, { "epoch": 18.647887323943664, "grad_norm": 0.7630655765533447, "learning_rate": 9.948e-06, "loss": 0.1442, "step": 3319 }, { "epoch": 18.653521126760563, "grad_norm": 0.5863944292068481, "learning_rate": 9.951e-06, "loss": 0.068, "step": 3320 }, { 
"epoch": 18.659154929577465, "grad_norm": 0.5785934329032898, "learning_rate": 9.954e-06, "loss": 0.0697, "step": 3321 }, { "epoch": 18.664788732394367, "grad_norm": 1.320989727973938, "learning_rate": 9.957e-06, "loss": 0.1476, "step": 3322 }, { "epoch": 18.670422535211266, "grad_norm": 0.8722823262214661, "learning_rate": 9.960000000000001e-06, "loss": 0.0572, "step": 3323 }, { "epoch": 18.676056338028168, "grad_norm": 0.7000361680984497, "learning_rate": 9.963000000000001e-06, "loss": 0.0899, "step": 3324 }, { "epoch": 18.68169014084507, "grad_norm": 0.9498292207717896, "learning_rate": 9.966e-06, "loss": 0.1106, "step": 3325 }, { "epoch": 18.687323943661973, "grad_norm": 0.8435276746749878, "learning_rate": 9.969e-06, "loss": 0.1122, "step": 3326 }, { "epoch": 18.692957746478875, "grad_norm": 1.0950429439544678, "learning_rate": 9.971999999999999e-06, "loss": 0.0853, "step": 3327 }, { "epoch": 18.698591549295774, "grad_norm": 0.7337435483932495, "learning_rate": 9.975e-06, "loss": 0.0806, "step": 3328 }, { "epoch": 18.704225352112676, "grad_norm": 0.6539646983146667, "learning_rate": 9.978e-06, "loss": 0.0955, "step": 3329 }, { "epoch": 18.70985915492958, "grad_norm": 0.521691083908081, "learning_rate": 9.981e-06, "loss": 0.0631, "step": 3330 }, { "epoch": 18.71549295774648, "grad_norm": 0.5943522453308105, "learning_rate": 9.984e-06, "loss": 0.0704, "step": 3331 }, { "epoch": 18.72112676056338, "grad_norm": 0.6801335215568542, "learning_rate": 9.987e-06, "loss": 0.0957, "step": 3332 }, { "epoch": 18.72676056338028, "grad_norm": 0.6314895153045654, "learning_rate": 9.990000000000001e-06, "loss": 0.0516, "step": 3333 }, { "epoch": 18.732394366197184, "grad_norm": 0.7910265326499939, "learning_rate": 9.993e-06, "loss": 0.0478, "step": 3334 }, { "epoch": 18.738028169014086, "grad_norm": 0.8184069395065308, "learning_rate": 9.996e-06, "loss": 0.1348, "step": 3335 }, { "epoch": 18.743661971830985, "grad_norm": 0.7496561408042908, "learning_rate": 9.999e-06, "loss": 
0.0404, "step": 3336 }, { "epoch": 18.749295774647887, "grad_norm": 1.733107089996338, "learning_rate": 1.0002e-05, "loss": 0.2459, "step": 3337 }, { "epoch": 18.75492957746479, "grad_norm": 0.7369199991226196, "learning_rate": 1.0005000000000002e-05, "loss": 0.1934, "step": 3338 }, { "epoch": 18.760563380281692, "grad_norm": 0.9573494791984558, "learning_rate": 1.0008e-05, "loss": 0.1852, "step": 3339 }, { "epoch": 18.76619718309859, "grad_norm": 0.9929550886154175, "learning_rate": 1.0011e-05, "loss": 0.2133, "step": 3340 }, { "epoch": 18.771830985915493, "grad_norm": 0.7138442397117615, "learning_rate": 1.0014e-05, "loss": 0.2035, "step": 3341 }, { "epoch": 18.777464788732395, "grad_norm": 0.7713431119918823, "learning_rate": 1.0016999999999999e-05, "loss": 0.2137, "step": 3342 }, { "epoch": 18.783098591549297, "grad_norm": 0.9603472948074341, "learning_rate": 1.002e-05, "loss": 0.2047, "step": 3343 }, { "epoch": 18.788732394366196, "grad_norm": 0.8325052261352539, "learning_rate": 1.0023e-05, "loss": 0.1732, "step": 3344 }, { "epoch": 18.7943661971831, "grad_norm": 0.8016058802604675, "learning_rate": 1.0026e-05, "loss": 0.1458, "step": 3345 }, { "epoch": 18.8, "grad_norm": 0.6749951243400574, "learning_rate": 1.0029e-05, "loss": 0.1266, "step": 3346 }, { "epoch": 18.805633802816903, "grad_norm": 0.7031674981117249, "learning_rate": 1.0032e-05, "loss": 0.1673, "step": 3347 }, { "epoch": 18.8112676056338, "grad_norm": 0.6678649187088013, "learning_rate": 1.0035000000000001e-05, "loss": 0.1286, "step": 3348 }, { "epoch": 18.816901408450704, "grad_norm": 0.7292724251747131, "learning_rate": 1.0038000000000001e-05, "loss": 0.1575, "step": 3349 }, { "epoch": 18.822535211267606, "grad_norm": 0.6296169757843018, "learning_rate": 1.0041000000000001e-05, "loss": 0.1256, "step": 3350 }, { "epoch": 18.82816901408451, "grad_norm": 0.6858195066452026, "learning_rate": 1.0043999999999999e-05, "loss": 0.1096, "step": 3351 }, { "epoch": 18.833802816901407, "grad_norm": 
0.6194732189178467, "learning_rate": 1.0046999999999999e-05, "loss": 0.1302, "step": 3352 }, { "epoch": 18.83943661971831, "grad_norm": 1.037428617477417, "learning_rate": 1.005e-05, "loss": 0.1608, "step": 3353 }, { "epoch": 18.845070422535212, "grad_norm": 0.6306082010269165, "learning_rate": 1.0053e-05, "loss": 0.0984, "step": 3354 }, { "epoch": 18.850704225352114, "grad_norm": 1.492693543434143, "learning_rate": 1.0056e-05, "loss": 0.1173, "step": 3355 }, { "epoch": 18.856338028169013, "grad_norm": 1.6225545406341553, "learning_rate": 1.0059e-05, "loss": 0.1233, "step": 3356 }, { "epoch": 18.861971830985915, "grad_norm": 0.758178174495697, "learning_rate": 1.0062e-05, "loss": 0.1074, "step": 3357 }, { "epoch": 18.867605633802818, "grad_norm": 0.5719346404075623, "learning_rate": 1.0065000000000001e-05, "loss": 0.0966, "step": 3358 }, { "epoch": 18.87323943661972, "grad_norm": 0.6541097164154053, "learning_rate": 1.0068e-05, "loss": 0.1283, "step": 3359 }, { "epoch": 18.87887323943662, "grad_norm": 0.7087709903717041, "learning_rate": 1.0071e-05, "loss": 0.0965, "step": 3360 }, { "epoch": 18.88450704225352, "grad_norm": 0.5901370048522949, "learning_rate": 1.0074e-05, "loss": 0.0699, "step": 3361 }, { "epoch": 18.890140845070423, "grad_norm": 0.842788577079773, "learning_rate": 1.0077e-05, "loss": 0.0995, "step": 3362 }, { "epoch": 18.895774647887325, "grad_norm": 0.6988987326622009, "learning_rate": 1.008e-05, "loss": 0.0931, "step": 3363 }, { "epoch": 18.901408450704224, "grad_norm": 0.6050257086753845, "learning_rate": 1.0083e-05, "loss": 0.1259, "step": 3364 }, { "epoch": 18.907042253521126, "grad_norm": 0.5330042839050293, "learning_rate": 1.0086e-05, "loss": 0.0673, "step": 3365 }, { "epoch": 18.91267605633803, "grad_norm": 0.7171017527580261, "learning_rate": 1.0089e-05, "loss": 0.1054, "step": 3366 }, { "epoch": 18.91830985915493, "grad_norm": 0.8256053924560547, "learning_rate": 1.0092e-05, "loss": 0.0771, "step": 3367 }, { "epoch": 18.92394366197183, 
"grad_norm": 0.4292299151420593, "learning_rate": 1.0095e-05, "loss": 0.0439, "step": 3368 }, { "epoch": 18.929577464788732, "grad_norm": 0.6503453850746155, "learning_rate": 1.0098e-05, "loss": 0.1435, "step": 3369 }, { "epoch": 18.935211267605634, "grad_norm": 0.7273785471916199, "learning_rate": 1.0101e-05, "loss": 0.0997, "step": 3370 }, { "epoch": 18.940845070422537, "grad_norm": 0.5958623290061951, "learning_rate": 1.0104e-05, "loss": 0.083, "step": 3371 }, { "epoch": 18.946478873239435, "grad_norm": 0.6574357748031616, "learning_rate": 1.0107e-05, "loss": 0.0579, "step": 3372 }, { "epoch": 18.952112676056338, "grad_norm": 0.6258409023284912, "learning_rate": 1.0110000000000001e-05, "loss": 0.0815, "step": 3373 }, { "epoch": 18.95774647887324, "grad_norm": 0.47386881709098816, "learning_rate": 1.0113000000000001e-05, "loss": 0.046, "step": 3374 }, { "epoch": 18.963380281690142, "grad_norm": 0.9814865589141846, "learning_rate": 1.0116000000000001e-05, "loss": 0.1244, "step": 3375 }, { "epoch": 18.96901408450704, "grad_norm": 1.4406538009643555, "learning_rate": 1.0119e-05, "loss": 0.0935, "step": 3376 }, { "epoch": 18.974647887323943, "grad_norm": 0.7298793792724609, "learning_rate": 1.0121999999999999e-05, "loss": 0.0497, "step": 3377 }, { "epoch": 18.980281690140846, "grad_norm": 1.6318669319152832, "learning_rate": 1.0125e-05, "loss": 0.0825, "step": 3378 }, { "epoch": 18.985915492957748, "grad_norm": 0.5952221751213074, "learning_rate": 1.0128e-05, "loss": 0.1036, "step": 3379 }, { "epoch": 18.991549295774647, "grad_norm": 1.1615735292434692, "learning_rate": 1.0131e-05, "loss": 0.1088, "step": 3380 }, { "epoch": 18.99718309859155, "grad_norm": 1.3281440734863281, "learning_rate": 1.0134e-05, "loss": 0.1692, "step": 3381 }, { "epoch": 19.0, "grad_norm": 0.4922459125518799, "learning_rate": 1.0137e-05, "loss": 0.0178, "step": 3382 }, { "epoch": 19.005633802816902, "grad_norm": 0.848738431930542, "learning_rate": 1.0140000000000001e-05, "loss": 0.2132, 
"step": 3383 }, { "epoch": 19.011267605633805, "grad_norm": 0.8772138953208923, "learning_rate": 1.0143000000000001e-05, "loss": 0.2682, "step": 3384 }, { "epoch": 19.016901408450703, "grad_norm": 0.6131483316421509, "learning_rate": 1.0146e-05, "loss": 0.1719, "step": 3385 }, { "epoch": 19.022535211267606, "grad_norm": 0.7549182772636414, "learning_rate": 1.0149e-05, "loss": 0.1976, "step": 3386 }, { "epoch": 19.028169014084508, "grad_norm": 0.699554443359375, "learning_rate": 1.0152e-05, "loss": 0.2606, "step": 3387 }, { "epoch": 19.03380281690141, "grad_norm": 0.7473641037940979, "learning_rate": 1.0155e-05, "loss": 0.1764, "step": 3388 }, { "epoch": 19.03943661971831, "grad_norm": 0.6767587065696716, "learning_rate": 1.0158e-05, "loss": 0.1676, "step": 3389 }, { "epoch": 19.04507042253521, "grad_norm": 0.5755335092544556, "learning_rate": 1.0161e-05, "loss": 0.1218, "step": 3390 }, { "epoch": 19.050704225352113, "grad_norm": 0.6933912634849548, "learning_rate": 1.0164e-05, "loss": 0.151, "step": 3391 }, { "epoch": 19.056338028169016, "grad_norm": 0.8673139214515686, "learning_rate": 1.0167e-05, "loss": 0.1234, "step": 3392 }, { "epoch": 19.061971830985915, "grad_norm": 0.8832148313522339, "learning_rate": 1.0170000000000001e-05, "loss": 0.1929, "step": 3393 }, { "epoch": 19.067605633802817, "grad_norm": 0.9497038125991821, "learning_rate": 1.0173e-05, "loss": 0.1351, "step": 3394 }, { "epoch": 19.07323943661972, "grad_norm": 0.570999026298523, "learning_rate": 1.0176e-05, "loss": 0.1605, "step": 3395 }, { "epoch": 19.07887323943662, "grad_norm": 0.8837947249412537, "learning_rate": 1.0179e-05, "loss": 0.1326, "step": 3396 }, { "epoch": 19.08450704225352, "grad_norm": 0.8161166310310364, "learning_rate": 1.0182e-05, "loss": 0.0963, "step": 3397 }, { "epoch": 19.090140845070422, "grad_norm": 0.6453783512115479, "learning_rate": 1.0185000000000002e-05, "loss": 0.1087, "step": 3398 }, { "epoch": 19.095774647887325, "grad_norm": 0.7032025456428528, "learning_rate": 
1.0188000000000001e-05, "loss": 0.1206, "step": 3399 }, { "epoch": 19.101408450704227, "grad_norm": 0.6809446215629578, "learning_rate": 1.0191e-05, "loss": 0.108, "step": 3400 }, { "epoch": 19.107042253521126, "grad_norm": 0.6461032032966614, "learning_rate": 1.0194e-05, "loss": 0.0823, "step": 3401 }, { "epoch": 19.112676056338028, "grad_norm": 0.553854763507843, "learning_rate": 1.0196999999999999e-05, "loss": 0.0678, "step": 3402 }, { "epoch": 19.11830985915493, "grad_norm": 0.6493980884552002, "learning_rate": 1.02e-05, "loss": 0.1004, "step": 3403 }, { "epoch": 19.123943661971833, "grad_norm": 0.8784224987030029, "learning_rate": 1.0203e-05, "loss": 0.0866, "step": 3404 }, { "epoch": 19.12957746478873, "grad_norm": 0.49829554557800293, "learning_rate": 1.0206e-05, "loss": 0.0748, "step": 3405 }, { "epoch": 19.135211267605634, "grad_norm": 0.4523729681968689, "learning_rate": 1.0209e-05, "loss": 0.0682, "step": 3406 }, { "epoch": 19.140845070422536, "grad_norm": 0.7991323471069336, "learning_rate": 1.0212e-05, "loss": 0.0951, "step": 3407 }, { "epoch": 19.146478873239438, "grad_norm": 0.5061530470848083, "learning_rate": 1.0215000000000001e-05, "loss": 0.0597, "step": 3408 }, { "epoch": 19.152112676056337, "grad_norm": 1.9762192964553833, "learning_rate": 1.0218000000000001e-05, "loss": 0.1317, "step": 3409 }, { "epoch": 19.15774647887324, "grad_norm": 0.5480526089668274, "learning_rate": 1.0221000000000001e-05, "loss": 0.0697, "step": 3410 }, { "epoch": 19.16338028169014, "grad_norm": 0.4929368197917938, "learning_rate": 1.0224e-05, "loss": 0.066, "step": 3411 }, { "epoch": 19.169014084507044, "grad_norm": 0.6358106732368469, "learning_rate": 1.0227e-05, "loss": 0.0775, "step": 3412 }, { "epoch": 19.174647887323943, "grad_norm": 0.6002525091171265, "learning_rate": 1.023e-05, "loss": 0.0487, "step": 3413 }, { "epoch": 19.180281690140845, "grad_norm": 1.039305329322815, "learning_rate": 1.0233e-05, "loss": 0.064, "step": 3414 }, { "epoch": 19.185915492957747, 
"grad_norm": 0.7124713063240051, "learning_rate": 1.0236e-05, "loss": 0.0469, "step": 3415 }, { "epoch": 19.19154929577465, "grad_norm": 0.4746576249599457, "learning_rate": 1.0239e-05, "loss": 0.0589, "step": 3416 }, { "epoch": 19.197183098591548, "grad_norm": 0.6392598152160645, "learning_rate": 1.0242e-05, "loss": 0.0709, "step": 3417 }, { "epoch": 19.20281690140845, "grad_norm": 0.7007731795310974, "learning_rate": 1.0245000000000001e-05, "loss": 0.0829, "step": 3418 }, { "epoch": 19.208450704225353, "grad_norm": 0.6440048813819885, "learning_rate": 1.0248e-05, "loss": 0.0748, "step": 3419 }, { "epoch": 19.214084507042255, "grad_norm": 0.7444767951965332, "learning_rate": 1.0251e-05, "loss": 0.0559, "step": 3420 }, { "epoch": 19.219718309859154, "grad_norm": 0.6129188537597656, "learning_rate": 1.0254e-05, "loss": 0.0438, "step": 3421 }, { "epoch": 19.225352112676056, "grad_norm": 0.6833425164222717, "learning_rate": 1.0257e-05, "loss": 0.0709, "step": 3422 }, { "epoch": 19.23098591549296, "grad_norm": 0.819930911064148, "learning_rate": 1.0260000000000002e-05, "loss": 0.0368, "step": 3423 }, { "epoch": 19.23661971830986, "grad_norm": 0.8019630312919617, "learning_rate": 1.0263000000000002e-05, "loss": 0.059, "step": 3424 }, { "epoch": 19.24225352112676, "grad_norm": 1.6505316495895386, "learning_rate": 1.0266e-05, "loss": 0.0827, "step": 3425 }, { "epoch": 19.24788732394366, "grad_norm": 1.0171784162521362, "learning_rate": 1.0269e-05, "loss": 0.1173, "step": 3426 }, { "epoch": 19.253521126760564, "grad_norm": 0.8716493844985962, "learning_rate": 1.0272e-05, "loss": 0.2709, "step": 3427 }, { "epoch": 19.259154929577466, "grad_norm": 0.8670639991760254, "learning_rate": 1.0275e-05, "loss": 0.194, "step": 3428 }, { "epoch": 19.264788732394365, "grad_norm": 0.60969477891922, "learning_rate": 1.0278e-05, "loss": 0.1517, "step": 3429 }, { "epoch": 19.270422535211267, "grad_norm": 0.5559886693954468, "learning_rate": 1.0281e-05, "loss": 0.1661, "step": 3430 }, { 
"epoch": 19.27605633802817, "grad_norm": 0.6381521821022034, "learning_rate": 1.0284e-05, "loss": 0.1336, "step": 3431 }, { "epoch": 19.281690140845072, "grad_norm": 0.8647052049636841, "learning_rate": 1.0287e-05, "loss": 0.1757, "step": 3432 }, { "epoch": 19.28732394366197, "grad_norm": 0.6230176091194153, "learning_rate": 1.0290000000000001e-05, "loss": 0.1407, "step": 3433 }, { "epoch": 19.292957746478873, "grad_norm": 0.6390235424041748, "learning_rate": 1.0293000000000001e-05, "loss": 0.1544, "step": 3434 }, { "epoch": 19.298591549295775, "grad_norm": 0.841249406337738, "learning_rate": 1.0296000000000001e-05, "loss": 0.193, "step": 3435 }, { "epoch": 19.304225352112677, "grad_norm": 0.5989041924476624, "learning_rate": 1.0299e-05, "loss": 0.131, "step": 3436 }, { "epoch": 19.309859154929576, "grad_norm": 0.9917040467262268, "learning_rate": 1.0301999999999999e-05, "loss": 0.1647, "step": 3437 }, { "epoch": 19.31549295774648, "grad_norm": 0.7596990466117859, "learning_rate": 1.0305e-05, "loss": 0.1368, "step": 3438 }, { "epoch": 19.32112676056338, "grad_norm": 0.8295702934265137, "learning_rate": 1.0308e-05, "loss": 0.1335, "step": 3439 }, { "epoch": 19.326760563380283, "grad_norm": 0.9527475833892822, "learning_rate": 1.0311e-05, "loss": 0.1933, "step": 3440 }, { "epoch": 19.33239436619718, "grad_norm": 1.1594103574752808, "learning_rate": 1.0314e-05, "loss": 0.1444, "step": 3441 }, { "epoch": 19.338028169014084, "grad_norm": 0.7379975318908691, "learning_rate": 1.0317e-05, "loss": 0.1348, "step": 3442 }, { "epoch": 19.343661971830986, "grad_norm": 0.5773608684539795, "learning_rate": 1.032e-05, "loss": 0.1001, "step": 3443 }, { "epoch": 19.34929577464789, "grad_norm": 0.6188704967498779, "learning_rate": 1.0323000000000001e-05, "loss": 0.1098, "step": 3444 }, { "epoch": 19.354929577464787, "grad_norm": 0.8876172304153442, "learning_rate": 1.0326e-05, "loss": 0.168, "step": 3445 }, { "epoch": 19.36056338028169, "grad_norm": 0.5684492588043213, 
"learning_rate": 1.0329e-05, "loss": 0.0923, "step": 3446 }, { "epoch": 19.366197183098592, "grad_norm": 2.51430344581604, "learning_rate": 1.0332e-05, "loss": 0.1154, "step": 3447 }, { "epoch": 19.371830985915494, "grad_norm": 0.7748844027519226, "learning_rate": 1.0335e-05, "loss": 0.0867, "step": 3448 }, { "epoch": 19.377464788732393, "grad_norm": 0.6899228692054749, "learning_rate": 1.0338e-05, "loss": 0.1148, "step": 3449 }, { "epoch": 19.383098591549295, "grad_norm": 0.8775354027748108, "learning_rate": 1.0341e-05, "loss": 0.1076, "step": 3450 }, { "epoch": 19.388732394366198, "grad_norm": 0.6085919141769409, "learning_rate": 1.0344e-05, "loss": 0.0814, "step": 3451 }, { "epoch": 19.3943661971831, "grad_norm": 1.582014799118042, "learning_rate": 1.0347e-05, "loss": 0.0927, "step": 3452 }, { "epoch": 19.4, "grad_norm": 0.9713743925094604, "learning_rate": 1.035e-05, "loss": 0.1142, "step": 3453 }, { "epoch": 19.4056338028169, "grad_norm": 0.728151261806488, "learning_rate": 1.0353e-05, "loss": 0.0957, "step": 3454 }, { "epoch": 19.411267605633803, "grad_norm": 0.5586870908737183, "learning_rate": 1.0356e-05, "loss": 0.0442, "step": 3455 }, { "epoch": 19.416901408450705, "grad_norm": 0.8020830154418945, "learning_rate": 1.0359e-05, "loss": 0.1185, "step": 3456 }, { "epoch": 19.422535211267604, "grad_norm": 0.6126598715782166, "learning_rate": 1.0362e-05, "loss": 0.0796, "step": 3457 }, { "epoch": 19.428169014084506, "grad_norm": 0.7148090600967407, "learning_rate": 1.0365e-05, "loss": 0.0671, "step": 3458 }, { "epoch": 19.43380281690141, "grad_norm": 1.7327015399932861, "learning_rate": 1.0368000000000001e-05, "loss": 0.1381, "step": 3459 }, { "epoch": 19.43943661971831, "grad_norm": 0.7613572478294373, "learning_rate": 1.0371000000000001e-05, "loss": 0.1164, "step": 3460 }, { "epoch": 19.44507042253521, "grad_norm": 0.757022500038147, "learning_rate": 1.0374000000000001e-05, "loss": 0.0543, "step": 3461 }, { "epoch": 19.450704225352112, "grad_norm": 
0.5491852164268494, "learning_rate": 1.0376999999999999e-05, "loss": 0.1079, "step": 3462 }, { "epoch": 19.456338028169014, "grad_norm": 0.8017126321792603, "learning_rate": 1.0379999999999999e-05, "loss": 0.0994, "step": 3463 }, { "epoch": 19.461971830985917, "grad_norm": 0.6213554739952087, "learning_rate": 1.0383e-05, "loss": 0.0486, "step": 3464 }, { "epoch": 19.467605633802815, "grad_norm": 0.5454593300819397, "learning_rate": 1.0386e-05, "loss": 0.0507, "step": 3465 }, { "epoch": 19.473239436619718, "grad_norm": 0.7971155643463135, "learning_rate": 1.0389e-05, "loss": 0.1112, "step": 3466 }, { "epoch": 19.47887323943662, "grad_norm": 0.6497503519058228, "learning_rate": 1.0392e-05, "loss": 0.0934, "step": 3467 }, { "epoch": 19.484507042253522, "grad_norm": 0.5275029540061951, "learning_rate": 1.0395e-05, "loss": 0.0476, "step": 3468 }, { "epoch": 19.49014084507042, "grad_norm": 0.7015315890312195, "learning_rate": 1.0398000000000001e-05, "loss": 0.0774, "step": 3469 }, { "epoch": 19.495774647887323, "grad_norm": 1.38868248462677, "learning_rate": 1.0401000000000001e-05, "loss": 0.0743, "step": 3470 }, { "epoch": 19.501408450704226, "grad_norm": 0.913762092590332, "learning_rate": 1.0404e-05, "loss": 0.2429, "step": 3471 }, { "epoch": 19.507042253521128, "grad_norm": 0.8375852704048157, "learning_rate": 1.0407e-05, "loss": 0.1967, "step": 3472 }, { "epoch": 19.512676056338027, "grad_norm": 0.867321252822876, "learning_rate": 1.041e-05, "loss": 0.2138, "step": 3473 }, { "epoch": 19.51830985915493, "grad_norm": 0.9633779525756836, "learning_rate": 1.0413e-05, "loss": 0.1967, "step": 3474 }, { "epoch": 19.52394366197183, "grad_norm": 0.7590082287788391, "learning_rate": 1.0416e-05, "loss": 0.1876, "step": 3475 }, { "epoch": 19.529577464788733, "grad_norm": 0.761542797088623, "learning_rate": 1.0419e-05, "loss": 0.195, "step": 3476 }, { "epoch": 19.535211267605632, "grad_norm": 0.6719033718109131, "learning_rate": 1.0422e-05, "loss": 0.1815, "step": 3477 }, { 
"epoch": 19.540845070422534, "grad_norm": 0.7922148704528809, "learning_rate": 1.0425e-05, "loss": 0.2396, "step": 3478 }, { "epoch": 19.546478873239437, "grad_norm": 0.6770527362823486, "learning_rate": 1.0428e-05, "loss": 0.1617, "step": 3479 }, { "epoch": 19.55211267605634, "grad_norm": 0.735686182975769, "learning_rate": 1.0431e-05, "loss": 0.149, "step": 3480 }, { "epoch": 19.557746478873238, "grad_norm": 0.7066181898117065, "learning_rate": 1.0434e-05, "loss": 0.1479, "step": 3481 }, { "epoch": 19.56338028169014, "grad_norm": 0.5480279326438904, "learning_rate": 1.0437e-05, "loss": 0.0953, "step": 3482 }, { "epoch": 19.569014084507042, "grad_norm": 0.9581335186958313, "learning_rate": 1.044e-05, "loss": 0.1712, "step": 3483 }, { "epoch": 19.574647887323945, "grad_norm": 0.6949595212936401, "learning_rate": 1.0443000000000001e-05, "loss": 0.0992, "step": 3484 }, { "epoch": 19.580281690140843, "grad_norm": 0.762494683265686, "learning_rate": 1.0446000000000001e-05, "loss": 0.1729, "step": 3485 }, { "epoch": 19.585915492957746, "grad_norm": 0.9986592531204224, "learning_rate": 1.0449e-05, "loss": 0.1737, "step": 3486 }, { "epoch": 19.591549295774648, "grad_norm": 0.6033639907836914, "learning_rate": 1.0452e-05, "loss": 0.0907, "step": 3487 }, { "epoch": 19.59718309859155, "grad_norm": 0.82533198595047, "learning_rate": 1.0454999999999999e-05, "loss": 0.1354, "step": 3488 }, { "epoch": 19.60281690140845, "grad_norm": 0.63410484790802, "learning_rate": 1.0458e-05, "loss": 0.0783, "step": 3489 }, { "epoch": 19.60845070422535, "grad_norm": 0.6849428415298462, "learning_rate": 1.0461e-05, "loss": 0.1203, "step": 3490 }, { "epoch": 19.614084507042254, "grad_norm": 0.6679311990737915, "learning_rate": 1.0464e-05, "loss": 0.123, "step": 3491 }, { "epoch": 19.619718309859156, "grad_norm": 0.7235432267189026, "learning_rate": 1.0467e-05, "loss": 0.1146, "step": 3492 }, { "epoch": 19.625352112676055, "grad_norm": 0.6556351780891418, "learning_rate": 1.047e-05, "loss": 
0.108, "step": 3493 }, { "epoch": 19.630985915492957, "grad_norm": 0.5118615627288818, "learning_rate": 1.0473000000000001e-05, "loss": 0.0642, "step": 3494 }, { "epoch": 19.63661971830986, "grad_norm": 0.6425915956497192, "learning_rate": 1.0476000000000001e-05, "loss": 0.0953, "step": 3495 }, { "epoch": 19.64225352112676, "grad_norm": 0.9708284139633179, "learning_rate": 1.0479e-05, "loss": 0.1099, "step": 3496 }, { "epoch": 19.647887323943664, "grad_norm": 0.5938459634780884, "learning_rate": 1.0482e-05, "loss": 0.084, "step": 3497 }, { "epoch": 19.653521126760563, "grad_norm": 0.4997764527797699, "learning_rate": 1.0485e-05, "loss": 0.0591, "step": 3498 }, { "epoch": 19.659154929577465, "grad_norm": 0.6850653886795044, "learning_rate": 1.0488e-05, "loss": 0.0766, "step": 3499 }, { "epoch": 19.664788732394367, "grad_norm": 0.5942131280899048, "learning_rate": 1.0491e-05, "loss": 0.0724, "step": 3500 }, { "epoch": 19.670422535211266, "grad_norm": 0.7699737548828125, "learning_rate": 1.0494e-05, "loss": 0.0747, "step": 3501 }, { "epoch": 19.676056338028168, "grad_norm": 0.9811207056045532, "learning_rate": 1.0497e-05, "loss": 0.0928, "step": 3502 }, { "epoch": 19.68169014084507, "grad_norm": 1.2506821155548096, "learning_rate": 1.05e-05, "loss": 0.1291, "step": 3503 }, { "epoch": 19.687323943661973, "grad_norm": 0.8511802554130554, "learning_rate": 1.0503000000000001e-05, "loss": 0.1294, "step": 3504 }, { "epoch": 19.692957746478875, "grad_norm": 0.8406641483306885, "learning_rate": 1.0506e-05, "loss": 0.0691, "step": 3505 }, { "epoch": 19.698591549295774, "grad_norm": 0.6100146770477295, "learning_rate": 1.0509e-05, "loss": 0.0659, "step": 3506 }, { "epoch": 19.704225352112676, "grad_norm": 0.5758851170539856, "learning_rate": 1.0512e-05, "loss": 0.101, "step": 3507 }, { "epoch": 19.70985915492958, "grad_norm": 1.3413009643554688, "learning_rate": 1.0515e-05, "loss": 0.0928, "step": 3508 }, { "epoch": 19.71549295774648, "grad_norm": 1.726977825164795, 
"learning_rate": 1.0518000000000002e-05, "loss": 0.1501, "step": 3509 }, { "epoch": 19.72112676056338, "grad_norm": 0.7487221956253052, "learning_rate": 1.0521000000000001e-05, "loss": 0.0636, "step": 3510 }, { "epoch": 19.72676056338028, "grad_norm": 0.7851627469062805, "learning_rate": 1.0524e-05, "loss": 0.0574, "step": 3511 }, { "epoch": 19.732394366197184, "grad_norm": 0.499793142080307, "learning_rate": 1.0527e-05, "loss": 0.068, "step": 3512 }, { "epoch": 19.738028169014086, "grad_norm": 0.8681271076202393, "learning_rate": 1.0529999999999999e-05, "loss": 0.0562, "step": 3513 }, { "epoch": 19.743661971830985, "grad_norm": 0.6916260719299316, "learning_rate": 1.0533e-05, "loss": 0.0293, "step": 3514 }, { "epoch": 19.749295774647887, "grad_norm": 1.0687271356582642, "learning_rate": 1.0536e-05, "loss": 0.2441, "step": 3515 }, { "epoch": 19.75492957746479, "grad_norm": 0.9270164370536804, "learning_rate": 1.0539e-05, "loss": 0.2165, "step": 3516 }, { "epoch": 19.760563380281692, "grad_norm": 0.9246382117271423, "learning_rate": 1.0542e-05, "loss": 0.2675, "step": 3517 }, { "epoch": 19.76619718309859, "grad_norm": 0.690577507019043, "learning_rate": 1.0545e-05, "loss": 0.1735, "step": 3518 }, { "epoch": 19.771830985915493, "grad_norm": 0.8468503355979919, "learning_rate": 1.0548000000000001e-05, "loss": 0.2048, "step": 3519 }, { "epoch": 19.777464788732395, "grad_norm": 0.8130530714988708, "learning_rate": 1.0551000000000001e-05, "loss": 0.1906, "step": 3520 }, { "epoch": 19.783098591549297, "grad_norm": 0.7793546319007874, "learning_rate": 1.0554000000000001e-05, "loss": 0.1588, "step": 3521 }, { "epoch": 19.788732394366196, "grad_norm": 1.072245478630066, "learning_rate": 1.0557e-05, "loss": 0.1946, "step": 3522 }, { "epoch": 19.7943661971831, "grad_norm": 0.7047404050827026, "learning_rate": 1.0559999999999999e-05, "loss": 0.1169, "step": 3523 }, { "epoch": 19.8, "grad_norm": 0.6003009080886841, "learning_rate": 1.0563e-05, "loss": 0.1603, "step": 3524 }, { 
"epoch": 19.805633802816903, "grad_norm": 0.6713486909866333, "learning_rate": 1.0566e-05, "loss": 0.1689, "step": 3525 }, { "epoch": 19.8112676056338, "grad_norm": 0.5327715873718262, "learning_rate": 1.0569e-05, "loss": 0.1111, "step": 3526 }, { "epoch": 19.816901408450704, "grad_norm": 1.1909104585647583, "learning_rate": 1.0572e-05, "loss": 0.1975, "step": 3527 }, { "epoch": 19.822535211267606, "grad_norm": 0.6418676376342773, "learning_rate": 1.0575e-05, "loss": 0.1037, "step": 3528 }, { "epoch": 19.82816901408451, "grad_norm": 0.723684549331665, "learning_rate": 1.0578000000000001e-05, "loss": 0.1203, "step": 3529 }, { "epoch": 19.833802816901407, "grad_norm": 0.6391441822052002, "learning_rate": 1.0581e-05, "loss": 0.1145, "step": 3530 }, { "epoch": 19.83943661971831, "grad_norm": 0.7955648303031921, "learning_rate": 1.0584e-05, "loss": 0.0935, "step": 3531 }, { "epoch": 19.845070422535212, "grad_norm": 0.7198660373687744, "learning_rate": 1.0587e-05, "loss": 0.089, "step": 3532 }, { "epoch": 19.850704225352114, "grad_norm": 0.6903725266456604, "learning_rate": 1.059e-05, "loss": 0.0963, "step": 3533 }, { "epoch": 19.856338028169013, "grad_norm": 0.8593350648880005, "learning_rate": 1.0593000000000002e-05, "loss": 0.0935, "step": 3534 }, { "epoch": 19.861971830985915, "grad_norm": 1.2588996887207031, "learning_rate": 1.0596e-05, "loss": 0.1752, "step": 3535 }, { "epoch": 19.867605633802818, "grad_norm": 0.8034027218818665, "learning_rate": 1.0599e-05, "loss": 0.1187, "step": 3536 }, { "epoch": 19.87323943661972, "grad_norm": 0.8432564735412598, "learning_rate": 1.0602e-05, "loss": 0.1313, "step": 3537 }, { "epoch": 19.87887323943662, "grad_norm": 0.7983089685440063, "learning_rate": 1.0605e-05, "loss": 0.0962, "step": 3538 }, { "epoch": 19.88450704225352, "grad_norm": 0.6656982898712158, "learning_rate": 1.0608e-05, "loss": 0.1216, "step": 3539 }, { "epoch": 19.890140845070423, "grad_norm": 0.6676833629608154, "learning_rate": 1.0611e-05, "loss": 0.0732, 
"step": 3540 }, { "epoch": 19.895774647887325, "grad_norm": 0.6544631719589233, "learning_rate": 1.0614e-05, "loss": 0.0714, "step": 3541 }, { "epoch": 19.901408450704224, "grad_norm": 0.7015381455421448, "learning_rate": 1.0617e-05, "loss": 0.0926, "step": 3542 }, { "epoch": 19.907042253521126, "grad_norm": 1.2649813890457153, "learning_rate": 1.062e-05, "loss": 0.0976, "step": 3543 }, { "epoch": 19.91267605633803, "grad_norm": 0.6560747027397156, "learning_rate": 1.0623000000000001e-05, "loss": 0.091, "step": 3544 }, { "epoch": 19.91830985915493, "grad_norm": 0.7787750959396362, "learning_rate": 1.0626000000000001e-05, "loss": 0.1282, "step": 3545 }, { "epoch": 19.92394366197183, "grad_norm": 3.251315116882324, "learning_rate": 1.0629000000000001e-05, "loss": 0.0529, "step": 3546 }, { "epoch": 19.929577464788732, "grad_norm": 0.6497890949249268, "learning_rate": 1.0632000000000001e-05, "loss": 0.0782, "step": 3547 }, { "epoch": 19.935211267605634, "grad_norm": 0.48037949204444885, "learning_rate": 1.0634999999999999e-05, "loss": 0.103, "step": 3548 }, { "epoch": 19.940845070422537, "grad_norm": 0.5206389427185059, "learning_rate": 1.0638e-05, "loss": 0.0602, "step": 3549 }, { "epoch": 19.946478873239435, "grad_norm": 0.5523713231086731, "learning_rate": 1.0641e-05, "loss": 0.0636, "step": 3550 }, { "epoch": 19.952112676056338, "grad_norm": 0.6053462624549866, "learning_rate": 1.0644e-05, "loss": 0.0697, "step": 3551 }, { "epoch": 19.95774647887324, "grad_norm": 0.8266287446022034, "learning_rate": 1.0647e-05, "loss": 0.0895, "step": 3552 }, { "epoch": 19.963380281690142, "grad_norm": 0.6095572710037231, "learning_rate": 1.065e-05, "loss": 0.0575, "step": 3553 }, { "epoch": 19.96901408450704, "grad_norm": 0.5283779501914978, "learning_rate": 1.0653000000000001e-05, "loss": 0.0471, "step": 3554 }, { "epoch": 19.974647887323943, "grad_norm": 0.6338978409767151, "learning_rate": 1.0656000000000001e-05, "loss": 0.0378, "step": 3555 }, { "epoch": 19.980281690140846, 
"grad_norm": 0.6728408932685852, "learning_rate": 1.0659e-05, "loss": 0.0561, "step": 3556 }, { "epoch": 19.985915492957748, "grad_norm": 0.7710195779800415, "learning_rate": 1.0662e-05, "loss": 0.1064, "step": 3557 }, { "epoch": 19.991549295774647, "grad_norm": 0.7771931886672974, "learning_rate": 1.0665e-05, "loss": 0.0615, "step": 3558 }, { "epoch": 19.99718309859155, "grad_norm": 0.7959868907928467, "learning_rate": 1.0668000000000002e-05, "loss": 0.14, "step": 3559 }, { "epoch": 20.0, "grad_norm": 0.3920738995075226, "learning_rate": 1.0671e-05, "loss": 0.0186, "step": 3560 }, { "epoch": 20.005633802816902, "grad_norm": 0.8700683116912842, "learning_rate": 1.0674e-05, "loss": 0.2741, "step": 3561 }, { "epoch": 20.011267605633805, "grad_norm": 0.7142202854156494, "learning_rate": 1.0677e-05, "loss": 0.1832, "step": 3562 }, { "epoch": 20.016901408450703, "grad_norm": 0.827530026435852, "learning_rate": 1.068e-05, "loss": 0.1676, "step": 3563 }, { "epoch": 20.022535211267606, "grad_norm": 0.7573842406272888, "learning_rate": 1.0683000000000001e-05, "loss": 0.1764, "step": 3564 }, { "epoch": 20.028169014084508, "grad_norm": 0.6948383450508118, "learning_rate": 1.0686e-05, "loss": 0.1898, "step": 3565 }, { "epoch": 20.03380281690141, "grad_norm": 0.8493865132331848, "learning_rate": 1.0689e-05, "loss": 0.1886, "step": 3566 }, { "epoch": 20.03943661971831, "grad_norm": 0.7057263255119324, "learning_rate": 1.0692e-05, "loss": 0.1151, "step": 3567 }, { "epoch": 20.04507042253521, "grad_norm": 0.7424767017364502, "learning_rate": 1.0695e-05, "loss": 0.2577, "step": 3568 }, { "epoch": 20.050704225352113, "grad_norm": 0.6872847080230713, "learning_rate": 1.0698e-05, "loss": 0.1384, "step": 3569 }, { "epoch": 20.056338028169016, "grad_norm": 0.583867609500885, "learning_rate": 1.0701000000000001e-05, "loss": 0.102, "step": 3570 }, { "epoch": 20.061971830985915, "grad_norm": 0.6354519724845886, "learning_rate": 1.0704000000000001e-05, "loss": 0.1012, "step": 3571 }, { 
"epoch": 20.067605633802817, "grad_norm": 0.651496171951294, "learning_rate": 1.0707e-05, "loss": 0.1126, "step": 3572 }, { "epoch": 20.07323943661972, "grad_norm": 0.7743821144104004, "learning_rate": 1.0709999999999999e-05, "loss": 0.1472, "step": 3573 }, { "epoch": 20.07887323943662, "grad_norm": 0.7289628386497498, "learning_rate": 1.0712999999999999e-05, "loss": 0.1096, "step": 3574 }, { "epoch": 20.08450704225352, "grad_norm": 0.5792043209075928, "learning_rate": 1.0716e-05, "loss": 0.1082, "step": 3575 }, { "epoch": 20.090140845070422, "grad_norm": 0.8318267464637756, "learning_rate": 1.0719e-05, "loss": 0.1405, "step": 3576 }, { "epoch": 20.095774647887325, "grad_norm": 0.609465479850769, "learning_rate": 1.0722e-05, "loss": 0.082, "step": 3577 }, { "epoch": 20.101408450704227, "grad_norm": 0.7519035935401917, "learning_rate": 1.0725e-05, "loss": 0.118, "step": 3578 }, { "epoch": 20.107042253521126, "grad_norm": 0.6272584199905396, "learning_rate": 1.0728e-05, "loss": 0.0911, "step": 3579 }, { "epoch": 20.112676056338028, "grad_norm": 0.6009683012962341, "learning_rate": 1.0731000000000001e-05, "loss": 0.0895, "step": 3580 }, { "epoch": 20.11830985915493, "grad_norm": 0.8232515454292297, "learning_rate": 1.0734000000000001e-05, "loss": 0.1128, "step": 3581 }, { "epoch": 20.123943661971833, "grad_norm": 0.8043489456176758, "learning_rate": 1.0737e-05, "loss": 0.0842, "step": 3582 }, { "epoch": 20.12957746478873, "grad_norm": 1.824074387550354, "learning_rate": 1.074e-05, "loss": 0.1749, "step": 3583 }, { "epoch": 20.135211267605634, "grad_norm": 0.7493594884872437, "learning_rate": 1.0743e-05, "loss": 0.0983, "step": 3584 }, { "epoch": 20.140845070422536, "grad_norm": 0.5792694091796875, "learning_rate": 1.0746e-05, "loss": 0.0693, "step": 3585 }, { "epoch": 20.146478873239438, "grad_norm": 0.7018504738807678, "learning_rate": 1.0749e-05, "loss": 0.0914, "step": 3586 }, { "epoch": 20.152112676056337, "grad_norm": 0.7410435676574707, "learning_rate": 
1.0752e-05, "loss": 0.0843, "step": 3587 }, { "epoch": 20.15774647887324, "grad_norm": 0.8822108507156372, "learning_rate": 1.0755e-05, "loss": 0.0857, "step": 3588 }, { "epoch": 20.16338028169014, "grad_norm": 0.6504114270210266, "learning_rate": 1.0758e-05, "loss": 0.0653, "step": 3589 }, { "epoch": 20.169014084507044, "grad_norm": 0.6410222053527832, "learning_rate": 1.0761e-05, "loss": 0.0979, "step": 3590 }, { "epoch": 20.174647887323943, "grad_norm": 0.8883417248725891, "learning_rate": 1.0764e-05, "loss": 0.0881, "step": 3591 }, { "epoch": 20.180281690140845, "grad_norm": 0.8289663791656494, "learning_rate": 1.0767e-05, "loss": 0.0793, "step": 3592 }, { "epoch": 20.185915492957747, "grad_norm": 0.4834751486778259, "learning_rate": 1.077e-05, "loss": 0.0989, "step": 3593 }, { "epoch": 20.19154929577465, "grad_norm": 0.7209324836730957, "learning_rate": 1.0773e-05, "loss": 0.0881, "step": 3594 }, { "epoch": 20.197183098591548, "grad_norm": 0.6014270186424255, "learning_rate": 1.0776000000000002e-05, "loss": 0.0869, "step": 3595 }, { "epoch": 20.20281690140845, "grad_norm": 4.316483020782471, "learning_rate": 1.0779000000000001e-05, "loss": 0.0631, "step": 3596 }, { "epoch": 20.208450704225353, "grad_norm": 0.8004668354988098, "learning_rate": 1.0782e-05, "loss": 0.0747, "step": 3597 }, { "epoch": 20.214084507042255, "grad_norm": 0.5428716540336609, "learning_rate": 1.0785e-05, "loss": 0.0404, "step": 3598 }, { "epoch": 20.219718309859154, "grad_norm": 1.031782865524292, "learning_rate": 1.0787999999999999e-05, "loss": 0.0627, "step": 3599 }, { "epoch": 20.225352112676056, "grad_norm": 0.9309151768684387, "learning_rate": 1.0791e-05, "loss": 0.1189, "step": 3600 }, { "epoch": 20.23098591549296, "grad_norm": 0.8860088586807251, "learning_rate": 1.0794e-05, "loss": 0.0392, "step": 3601 }, { "epoch": 20.23661971830986, "grad_norm": 1.5303562879562378, "learning_rate": 1.0797e-05, "loss": 0.1442, "step": 3602 }, { "epoch": 20.24225352112676, "grad_norm": 
0.6062420010566711, "learning_rate": 1.08e-05, "loss": 0.0447, "step": 3603 }, { "epoch": 20.24788732394366, "grad_norm": 0.9654964208602905, "learning_rate": 1.0803e-05, "loss": 0.064, "step": 3604 }, { "epoch": 20.253521126760564, "grad_norm": 0.7768142819404602, "learning_rate": 1.0806000000000001e-05, "loss": 0.2384, "step": 3605 }, { "epoch": 20.259154929577466, "grad_norm": 0.772476851940155, "learning_rate": 1.0809000000000001e-05, "loss": 0.2025, "step": 3606 }, { "epoch": 20.264788732394365, "grad_norm": 0.8470531702041626, "learning_rate": 1.0812e-05, "loss": 0.1935, "step": 3607 }, { "epoch": 20.270422535211267, "grad_norm": 0.8296957612037659, "learning_rate": 1.0815e-05, "loss": 0.2122, "step": 3608 }, { "epoch": 20.27605633802817, "grad_norm": 0.8731991052627563, "learning_rate": 1.0817999999999999e-05, "loss": 0.1639, "step": 3609 }, { "epoch": 20.281690140845072, "grad_norm": 0.6515117883682251, "learning_rate": 1.0821e-05, "loss": 0.1433, "step": 3610 }, { "epoch": 20.28732394366197, "grad_norm": 0.7130858302116394, "learning_rate": 1.0824e-05, "loss": 0.1403, "step": 3611 }, { "epoch": 20.292957746478873, "grad_norm": 0.6496838331222534, "learning_rate": 1.0827e-05, "loss": 0.1492, "step": 3612 }, { "epoch": 20.298591549295775, "grad_norm": 0.5560324192047119, "learning_rate": 1.083e-05, "loss": 0.1257, "step": 3613 }, { "epoch": 20.304225352112677, "grad_norm": 0.7063159942626953, "learning_rate": 1.0833e-05, "loss": 0.1584, "step": 3614 }, { "epoch": 20.309859154929576, "grad_norm": 1.4253878593444824, "learning_rate": 1.0836000000000001e-05, "loss": 0.2042, "step": 3615 }, { "epoch": 20.31549295774648, "grad_norm": 0.6545450687408447, "learning_rate": 1.0839e-05, "loss": 0.1309, "step": 3616 }, { "epoch": 20.32112676056338, "grad_norm": 1.0281322002410889, "learning_rate": 1.0842e-05, "loss": 0.1267, "step": 3617 }, { "epoch": 20.326760563380283, "grad_norm": 0.9518589377403259, "learning_rate": 1.0845e-05, "loss": 0.1892, "step": 3618 }, { 
"epoch": 20.33239436619718, "grad_norm": 0.5213984847068787, "learning_rate": 1.0848e-05, "loss": 0.0895, "step": 3619 }, { "epoch": 20.338028169014084, "grad_norm": 0.6750963926315308, "learning_rate": 1.0851000000000002e-05, "loss": 0.109, "step": 3620 }, { "epoch": 20.343661971830986, "grad_norm": 0.6590588092803955, "learning_rate": 1.0854e-05, "loss": 0.1009, "step": 3621 }, { "epoch": 20.34929577464789, "grad_norm": 0.8653169870376587, "learning_rate": 1.0857e-05, "loss": 0.1536, "step": 3622 }, { "epoch": 20.354929577464787, "grad_norm": 0.7906818389892578, "learning_rate": 1.086e-05, "loss": 0.1183, "step": 3623 }, { "epoch": 20.36056338028169, "grad_norm": 0.6682420969009399, "learning_rate": 1.0863e-05, "loss": 0.0849, "step": 3624 }, { "epoch": 20.366197183098592, "grad_norm": 0.7462722063064575, "learning_rate": 1.0866e-05, "loss": 0.1354, "step": 3625 }, { "epoch": 20.371830985915494, "grad_norm": 0.8670287132263184, "learning_rate": 1.0869e-05, "loss": 0.1649, "step": 3626 }, { "epoch": 20.377464788732393, "grad_norm": 0.6034746170043945, "learning_rate": 1.0872e-05, "loss": 0.094, "step": 3627 }, { "epoch": 20.383098591549295, "grad_norm": 0.8181434273719788, "learning_rate": 1.0875e-05, "loss": 0.0794, "step": 3628 }, { "epoch": 20.388732394366198, "grad_norm": 0.7256209254264832, "learning_rate": 1.0878e-05, "loss": 0.1092, "step": 3629 }, { "epoch": 20.3943661971831, "grad_norm": 0.5929458737373352, "learning_rate": 1.0881000000000001e-05, "loss": 0.0779, "step": 3630 }, { "epoch": 20.4, "grad_norm": 0.8549261093139648, "learning_rate": 1.0884000000000001e-05, "loss": 0.098, "step": 3631 }, { "epoch": 20.4056338028169, "grad_norm": 0.5710935592651367, "learning_rate": 1.0887000000000001e-05, "loss": 0.0635, "step": 3632 }, { "epoch": 20.411267605633803, "grad_norm": 1.0220928192138672, "learning_rate": 1.089e-05, "loss": 0.0811, "step": 3633 }, { "epoch": 20.416901408450705, "grad_norm": 1.2843317985534668, "learning_rate": 1.0892999999999999e-05, 
"loss": 0.055, "step": 3634 }, { "epoch": 20.422535211267604, "grad_norm": 0.7190369367599487, "learning_rate": 1.0896e-05, "loss": 0.0707, "step": 3635 }, { "epoch": 20.428169014084506, "grad_norm": 0.7941792607307434, "learning_rate": 1.0899e-05, "loss": 0.0804, "step": 3636 }, { "epoch": 20.43380281690141, "grad_norm": 0.7485055923461914, "learning_rate": 1.0902e-05, "loss": 0.082, "step": 3637 }, { "epoch": 20.43943661971831, "grad_norm": 0.5211020112037659, "learning_rate": 1.0905e-05, "loss": 0.0359, "step": 3638 }, { "epoch": 20.44507042253521, "grad_norm": 0.521274209022522, "learning_rate": 1.0908e-05, "loss": 0.0657, "step": 3639 }, { "epoch": 20.450704225352112, "grad_norm": 1.2671338319778442, "learning_rate": 1.0911000000000001e-05, "loss": 0.1042, "step": 3640 }, { "epoch": 20.456338028169014, "grad_norm": 1.2189240455627441, "learning_rate": 1.0914000000000001e-05, "loss": 0.0849, "step": 3641 }, { "epoch": 20.461971830985917, "grad_norm": 0.6469488143920898, "learning_rate": 1.0917e-05, "loss": 0.0881, "step": 3642 }, { "epoch": 20.467605633802815, "grad_norm": 0.6637911200523376, "learning_rate": 1.092e-05, "loss": 0.1022, "step": 3643 }, { "epoch": 20.473239436619718, "grad_norm": 0.7222386002540588, "learning_rate": 1.0923e-05, "loss": 0.0441, "step": 3644 }, { "epoch": 20.47887323943662, "grad_norm": 0.5978081822395325, "learning_rate": 1.0926000000000002e-05, "loss": 0.0551, "step": 3645 }, { "epoch": 20.484507042253522, "grad_norm": 1.0089555978775024, "learning_rate": 1.0929e-05, "loss": 0.053, "step": 3646 }, { "epoch": 20.49014084507042, "grad_norm": 0.9780187606811523, "learning_rate": 1.0932e-05, "loss": 0.1158, "step": 3647 }, { "epoch": 20.495774647887323, "grad_norm": 0.6093153953552246, "learning_rate": 1.0935e-05, "loss": 0.029, "step": 3648 }, { "epoch": 20.501408450704226, "grad_norm": 1.0883787870407104, "learning_rate": 1.0938e-05, "loss": 0.2261, "step": 3649 }, { "epoch": 20.507042253521128, "grad_norm": 0.7347003221511841, 
"learning_rate": 1.0941e-05, "loss": 0.1918, "step": 3650 }, { "epoch": 20.512676056338027, "grad_norm": 0.7922563552856445, "learning_rate": 1.0944e-05, "loss": 0.1737, "step": 3651 }, { "epoch": 20.51830985915493, "grad_norm": 0.9297928810119629, "learning_rate": 1.0947e-05, "loss": 0.1755, "step": 3652 }, { "epoch": 20.52394366197183, "grad_norm": 0.6567859053611755, "learning_rate": 1.095e-05, "loss": 0.1519, "step": 3653 }, { "epoch": 20.529577464788733, "grad_norm": 0.7680755853652954, "learning_rate": 1.0953e-05, "loss": 0.2432, "step": 3654 }, { "epoch": 20.535211267605632, "grad_norm": 0.6143758893013, "learning_rate": 1.0956000000000001e-05, "loss": 0.1603, "step": 3655 }, { "epoch": 20.540845070422534, "grad_norm": 0.6245512366294861, "learning_rate": 1.0959000000000001e-05, "loss": 0.1076, "step": 3656 }, { "epoch": 20.546478873239437, "grad_norm": 0.6992383003234863, "learning_rate": 1.0962000000000001e-05, "loss": 0.1314, "step": 3657 }, { "epoch": 20.55211267605634, "grad_norm": 0.7421095371246338, "learning_rate": 1.0965e-05, "loss": 0.1421, "step": 3658 }, { "epoch": 20.557746478873238, "grad_norm": 0.6711393594741821, "learning_rate": 1.0967999999999999e-05, "loss": 0.1328, "step": 3659 }, { "epoch": 20.56338028169014, "grad_norm": 0.8811914324760437, "learning_rate": 1.0971e-05, "loss": 0.1293, "step": 3660 }, { "epoch": 20.569014084507042, "grad_norm": 0.7190328240394592, "learning_rate": 1.0974e-05, "loss": 0.1677, "step": 3661 }, { "epoch": 20.574647887323945, "grad_norm": 0.6045607328414917, "learning_rate": 1.0977e-05, "loss": 0.1204, "step": 3662 }, { "epoch": 20.580281690140843, "grad_norm": 0.4752415418624878, "learning_rate": 1.098e-05, "loss": 0.0746, "step": 3663 }, { "epoch": 20.585915492957746, "grad_norm": 0.611997663974762, "learning_rate": 1.0983e-05, "loss": 0.1218, "step": 3664 }, { "epoch": 20.591549295774648, "grad_norm": 0.5411200523376465, "learning_rate": 1.0986000000000001e-05, "loss": 0.0824, "step": 3665 }, { "epoch": 
20.59718309859155, "grad_norm": 0.6250599026679993, "learning_rate": 1.0989000000000001e-05, "loss": 0.0749, "step": 3666 }, { "epoch": 20.60281690140845, "grad_norm": 0.6309748888015747, "learning_rate": 1.0992e-05, "loss": 0.0712, "step": 3667 }, { "epoch": 20.60845070422535, "grad_norm": 0.6626141667366028, "learning_rate": 1.0995e-05, "loss": 0.0762, "step": 3668 }, { "epoch": 20.614084507042254, "grad_norm": 0.40809550881385803, "learning_rate": 1.0998e-05, "loss": 0.0493, "step": 3669 }, { "epoch": 20.619718309859156, "grad_norm": 0.614365816116333, "learning_rate": 1.1001e-05, "loss": 0.0697, "step": 3670 }, { "epoch": 20.625352112676055, "grad_norm": 0.6260557770729065, "learning_rate": 1.1004e-05, "loss": 0.0752, "step": 3671 }, { "epoch": 20.630985915492957, "grad_norm": 0.6562927961349487, "learning_rate": 1.1007e-05, "loss": 0.0911, "step": 3672 }, { "epoch": 20.63661971830986, "grad_norm": 0.5745455622673035, "learning_rate": 1.101e-05, "loss": 0.0771, "step": 3673 }, { "epoch": 20.64225352112676, "grad_norm": 0.670145571231842, "learning_rate": 1.1013e-05, "loss": 0.0782, "step": 3674 }, { "epoch": 20.647887323943664, "grad_norm": 0.47162699699401855, "learning_rate": 1.1016000000000001e-05, "loss": 0.0414, "step": 3675 }, { "epoch": 20.653521126760563, "grad_norm": 0.5012069940567017, "learning_rate": 1.1019e-05, "loss": 0.0647, "step": 3676 }, { "epoch": 20.659154929577465, "grad_norm": 0.7637709379196167, "learning_rate": 1.1022e-05, "loss": 0.1279, "step": 3677 }, { "epoch": 20.664788732394367, "grad_norm": 0.6966926455497742, "learning_rate": 1.1025e-05, "loss": 0.0719, "step": 3678 }, { "epoch": 20.670422535211266, "grad_norm": 0.7064247727394104, "learning_rate": 1.1028e-05, "loss": 0.0498, "step": 3679 }, { "epoch": 20.676056338028168, "grad_norm": 1.074145793914795, "learning_rate": 1.1031000000000002e-05, "loss": 0.1005, "step": 3680 }, { "epoch": 20.68169014084507, "grad_norm": 1.0301637649536133, "learning_rate": 1.1034000000000001e-05, 
"loss": 0.115, "step": 3681 }, { "epoch": 20.687323943661973, "grad_norm": 1.3667668104171753, "learning_rate": 1.1037000000000001e-05, "loss": 0.0924, "step": 3682 }, { "epoch": 20.692957746478875, "grad_norm": 0.5538823008537292, "learning_rate": 1.104e-05, "loss": 0.0561, "step": 3683 }, { "epoch": 20.698591549295774, "grad_norm": 0.8277080059051514, "learning_rate": 1.1042999999999999e-05, "loss": 0.0662, "step": 3684 }, { "epoch": 20.704225352112676, "grad_norm": 0.8451762795448303, "learning_rate": 1.1046e-05, "loss": 0.0568, "step": 3685 }, { "epoch": 20.70985915492958, "grad_norm": 0.6463059782981873, "learning_rate": 1.1049e-05, "loss": 0.0618, "step": 3686 }, { "epoch": 20.71549295774648, "grad_norm": 0.9049654603004456, "learning_rate": 1.1052e-05, "loss": 0.1204, "step": 3687 }, { "epoch": 20.72112676056338, "grad_norm": 0.644165575504303, "learning_rate": 1.1055e-05, "loss": 0.0684, "step": 3688 }, { "epoch": 20.72676056338028, "grad_norm": 0.634178876876831, "learning_rate": 1.1058e-05, "loss": 0.0348, "step": 3689 }, { "epoch": 20.732394366197184, "grad_norm": 0.5524457693099976, "learning_rate": 1.1061000000000001e-05, "loss": 0.0259, "step": 3690 }, { "epoch": 20.738028169014086, "grad_norm": 1.0961180925369263, "learning_rate": 1.1064000000000001e-05, "loss": 0.0566, "step": 3691 }, { "epoch": 20.743661971830985, "grad_norm": 0.8555479645729065, "learning_rate": 1.1067000000000001e-05, "loss": 0.0827, "step": 3692 }, { "epoch": 20.749295774647887, "grad_norm": 1.1323118209838867, "learning_rate": 1.107e-05, "loss": 0.2374, "step": 3693 }, { "epoch": 20.75492957746479, "grad_norm": 0.9192282557487488, "learning_rate": 1.1073e-05, "loss": 0.1795, "step": 3694 }, { "epoch": 20.760563380281692, "grad_norm": 0.8357932567596436, "learning_rate": 1.1075999999999999e-05, "loss": 0.2219, "step": 3695 }, { "epoch": 20.76619718309859, "grad_norm": 0.9033218026161194, "learning_rate": 1.1079e-05, "loss": 0.2123, "step": 3696 }, { "epoch": 20.771830985915493, 
"grad_norm": 0.6063167452812195, "learning_rate": 1.1082e-05, "loss": 0.1705, "step": 3697 }, { "epoch": 20.777464788732395, "grad_norm": 0.7215505838394165, "learning_rate": 1.1085e-05, "loss": 0.2046, "step": 3698 }, { "epoch": 20.783098591549297, "grad_norm": 0.6953071355819702, "learning_rate": 1.1088e-05, "loss": 0.1397, "step": 3699 }, { "epoch": 20.788732394366196, "grad_norm": 0.6690040826797485, "learning_rate": 1.1091e-05, "loss": 0.1478, "step": 3700 }, { "epoch": 20.7943661971831, "grad_norm": 1.0102976560592651, "learning_rate": 1.1094e-05, "loss": 0.211, "step": 3701 }, { "epoch": 20.8, "grad_norm": 0.6507079005241394, "learning_rate": 1.1097e-05, "loss": 0.115, "step": 3702 }, { "epoch": 20.805633802816903, "grad_norm": 0.834831953048706, "learning_rate": 1.11e-05, "loss": 0.178, "step": 3703 }, { "epoch": 20.8112676056338, "grad_norm": 0.7055160403251648, "learning_rate": 1.1103e-05, "loss": 0.1212, "step": 3704 }, { "epoch": 20.816901408450704, "grad_norm": 0.6745755076408386, "learning_rate": 1.1106e-05, "loss": 0.1255, "step": 3705 }, { "epoch": 20.822535211267606, "grad_norm": 0.8690575361251831, "learning_rate": 1.1109000000000002e-05, "loss": 0.1365, "step": 3706 }, { "epoch": 20.82816901408451, "grad_norm": 0.6303755044937134, "learning_rate": 1.1112e-05, "loss": 0.1749, "step": 3707 }, { "epoch": 20.833802816901407, "grad_norm": 1.0297257900238037, "learning_rate": 1.1115e-05, "loss": 0.1754, "step": 3708 }, { "epoch": 20.83943661971831, "grad_norm": 0.5077477693557739, "learning_rate": 1.1118e-05, "loss": 0.0784, "step": 3709 }, { "epoch": 20.845070422535212, "grad_norm": 0.5903599262237549, "learning_rate": 1.1120999999999999e-05, "loss": 0.1117, "step": 3710 }, { "epoch": 20.850704225352114, "grad_norm": 0.6183481216430664, "learning_rate": 1.1124e-05, "loss": 0.103, "step": 3711 }, { "epoch": 20.856338028169013, "grad_norm": 0.7816523313522339, "learning_rate": 1.1127e-05, "loss": 0.1044, "step": 3712 }, { "epoch": 20.861971830985915, 
"grad_norm": 0.7796207070350647, "learning_rate": 1.113e-05, "loss": 0.0966, "step": 3713 }, { "epoch": 20.867605633802818, "grad_norm": 0.487729549407959, "learning_rate": 1.1133e-05, "loss": 0.1011, "step": 3714 }, { "epoch": 20.87323943661972, "grad_norm": 1.1020913124084473, "learning_rate": 1.1136e-05, "loss": 0.1289, "step": 3715 }, { "epoch": 20.87887323943662, "grad_norm": 0.47091519832611084, "learning_rate": 1.1139000000000001e-05, "loss": 0.0619, "step": 3716 }, { "epoch": 20.88450704225352, "grad_norm": 2.216675281524658, "learning_rate": 1.1142000000000001e-05, "loss": 0.0759, "step": 3717 }, { "epoch": 20.890140845070423, "grad_norm": 0.46310076117515564, "learning_rate": 1.1145000000000001e-05, "loss": 0.0574, "step": 3718 }, { "epoch": 20.895774647887325, "grad_norm": 0.6031591892242432, "learning_rate": 1.1148e-05, "loss": 0.1231, "step": 3719 }, { "epoch": 20.901408450704224, "grad_norm": 0.5030069947242737, "learning_rate": 1.1150999999999999e-05, "loss": 0.0835, "step": 3720 }, { "epoch": 20.907042253521126, "grad_norm": 0.584058403968811, "learning_rate": 1.1154e-05, "loss": 0.0678, "step": 3721 }, { "epoch": 20.91267605633803, "grad_norm": 0.4252653121948242, "learning_rate": 1.1157e-05, "loss": 0.051, "step": 3722 }, { "epoch": 20.91830985915493, "grad_norm": 0.5353942513465881, "learning_rate": 1.116e-05, "loss": 0.0606, "step": 3723 }, { "epoch": 20.92394366197183, "grad_norm": 0.45417261123657227, "learning_rate": 1.1163e-05, "loss": 0.0484, "step": 3724 }, { "epoch": 20.929577464788732, "grad_norm": 0.80482417345047, "learning_rate": 1.1166e-05, "loss": 0.118, "step": 3725 }, { "epoch": 20.935211267605634, "grad_norm": 0.563633918762207, "learning_rate": 1.1169000000000001e-05, "loss": 0.0793, "step": 3726 }, { "epoch": 20.940845070422537, "grad_norm": 0.8113824725151062, "learning_rate": 1.1172e-05, "loss": 0.0765, "step": 3727 }, { "epoch": 20.946478873239435, "grad_norm": 0.640701174736023, "learning_rate": 1.1175e-05, "loss": 0.0711, 
"step": 3728 }, { "epoch": 20.952112676056338, "grad_norm": 0.5069956183433533, "learning_rate": 1.1178e-05, "loss": 0.0847, "step": 3729 }, { "epoch": 20.95774647887324, "grad_norm": 0.782162070274353, "learning_rate": 1.1181e-05, "loss": 0.0495, "step": 3730 }, { "epoch": 20.963380281690142, "grad_norm": 0.4381646513938904, "learning_rate": 1.1184000000000002e-05, "loss": 0.0424, "step": 3731 }, { "epoch": 20.96901408450704, "grad_norm": 0.5939278602600098, "learning_rate": 1.1187e-05, "loss": 0.07, "step": 3732 }, { "epoch": 20.974647887323943, "grad_norm": 0.6363945007324219, "learning_rate": 1.119e-05, "loss": 0.0589, "step": 3733 }, { "epoch": 20.980281690140846, "grad_norm": 0.7696367502212524, "learning_rate": 1.1193e-05, "loss": 0.0731, "step": 3734 }, { "epoch": 20.985915492957748, "grad_norm": 0.8528777360916138, "learning_rate": 1.1196e-05, "loss": 0.047, "step": 3735 }, { "epoch": 20.991549295774647, "grad_norm": 1.354034662246704, "learning_rate": 1.1199e-05, "loss": 0.069, "step": 3736 }, { "epoch": 20.99718309859155, "grad_norm": 0.7354738116264343, "learning_rate": 1.1202e-05, "loss": 0.1445, "step": 3737 }, { "epoch": 21.0, "grad_norm": 1.488600254058838, "learning_rate": 1.1205e-05, "loss": 0.0326, "step": 3738 }, { "epoch": 21.005633802816902, "grad_norm": 0.8849765658378601, "learning_rate": 1.1208e-05, "loss": 0.2365, "step": 3739 }, { "epoch": 21.011267605633805, "grad_norm": 0.7734532356262207, "learning_rate": 1.1211e-05, "loss": 0.1836, "step": 3740 }, { "epoch": 21.016901408450703, "grad_norm": 0.6671873927116394, "learning_rate": 1.1214000000000001e-05, "loss": 0.2058, "step": 3741 }, { "epoch": 21.022535211267606, "grad_norm": 0.6882656812667847, "learning_rate": 1.1217000000000001e-05, "loss": 0.181, "step": 3742 }, { "epoch": 21.028169014084508, "grad_norm": 0.7428080439567566, "learning_rate": 1.1220000000000001e-05, "loss": 0.136, "step": 3743 }, { "epoch": 21.03380281690141, "grad_norm": 0.5148990154266357, "learning_rate": 
1.1222999999999999e-05, "loss": 0.0951, "step": 3744 }, { "epoch": 21.03943661971831, "grad_norm": 0.816633403301239, "learning_rate": 1.1225999999999999e-05, "loss": 0.1379, "step": 3745 }, { "epoch": 21.04507042253521, "grad_norm": 0.7388133406639099, "learning_rate": 1.1229e-05, "loss": 0.1807, "step": 3746 }, { "epoch": 21.050704225352113, "grad_norm": 0.6558136343955994, "learning_rate": 1.1232e-05, "loss": 0.1078, "step": 3747 }, { "epoch": 21.056338028169016, "grad_norm": 0.5741588473320007, "learning_rate": 1.1235e-05, "loss": 0.116, "step": 3748 }, { "epoch": 21.061971830985915, "grad_norm": 0.6439929604530334, "learning_rate": 1.1238e-05, "loss": 0.1316, "step": 3749 }, { "epoch": 21.067605633802817, "grad_norm": 0.7455112934112549, "learning_rate": 1.1241e-05, "loss": 0.13, "step": 3750 }, { "epoch": 21.07323943661972, "grad_norm": 0.7075356841087341, "learning_rate": 1.1244000000000001e-05, "loss": 0.1262, "step": 3751 }, { "epoch": 21.07887323943662, "grad_norm": 0.4720251262187958, "learning_rate": 1.1247000000000001e-05, "loss": 0.0893, "step": 3752 }, { "epoch": 21.08450704225352, "grad_norm": 0.7616335153579712, "learning_rate": 1.125e-05, "loss": 0.153, "step": 3753 }, { "epoch": 21.090140845070422, "grad_norm": 0.5728210210800171, "learning_rate": 1.1253e-05, "loss": 0.1088, "step": 3754 }, { "epoch": 21.095774647887325, "grad_norm": 0.6596336960792542, "learning_rate": 1.1256e-05, "loss": 0.1551, "step": 3755 }, { "epoch": 21.101408450704227, "grad_norm": 0.6671854257583618, "learning_rate": 1.1259e-05, "loss": 0.0924, "step": 3756 }, { "epoch": 21.107042253521126, "grad_norm": 0.7951295375823975, "learning_rate": 1.1262e-05, "loss": 0.0628, "step": 3757 }, { "epoch": 21.112676056338028, "grad_norm": 0.7030835151672363, "learning_rate": 1.1265e-05, "loss": 0.0858, "step": 3758 }, { "epoch": 21.11830985915493, "grad_norm": 0.6062578558921814, "learning_rate": 1.1268e-05, "loss": 0.084, "step": 3759 }, { "epoch": 21.123943661971833, "grad_norm": 
0.6047122478485107, "learning_rate": 1.1271e-05, "loss": 0.0782, "step": 3760 }, { "epoch": 21.12957746478873, "grad_norm": 0.6955386400222778, "learning_rate": 1.1274e-05, "loss": 0.1073, "step": 3761 }, { "epoch": 21.135211267605634, "grad_norm": 0.7582312822341919, "learning_rate": 1.1277e-05, "loss": 0.0974, "step": 3762 }, { "epoch": 21.140845070422536, "grad_norm": 0.6923277378082275, "learning_rate": 1.128e-05, "loss": 0.0674, "step": 3763 }, { "epoch": 21.146478873239438, "grad_norm": 0.6649025678634644, "learning_rate": 1.1283e-05, "loss": 0.1091, "step": 3764 }, { "epoch": 21.152112676056337, "grad_norm": 0.4845096170902252, "learning_rate": 1.1286e-05, "loss": 0.0568, "step": 3765 }, { "epoch": 21.15774647887324, "grad_norm": 0.6349717378616333, "learning_rate": 1.1289000000000002e-05, "loss": 0.0638, "step": 3766 }, { "epoch": 21.16338028169014, "grad_norm": 0.5742299556732178, "learning_rate": 1.1292000000000001e-05, "loss": 0.0665, "step": 3767 }, { "epoch": 21.169014084507044, "grad_norm": 0.40679556131362915, "learning_rate": 1.1295000000000001e-05, "loss": 0.1112, "step": 3768 }, { "epoch": 21.174647887323943, "grad_norm": 0.5048012137413025, "learning_rate": 1.1298e-05, "loss": 0.0527, "step": 3769 }, { "epoch": 21.180281690140845, "grad_norm": 0.7323009371757507, "learning_rate": 1.1300999999999999e-05, "loss": 0.1173, "step": 3770 }, { "epoch": 21.185915492957747, "grad_norm": 0.6989848613739014, "learning_rate": 1.1304e-05, "loss": 0.0645, "step": 3771 }, { "epoch": 21.19154929577465, "grad_norm": 1.103499412536621, "learning_rate": 1.1307e-05, "loss": 0.0799, "step": 3772 }, { "epoch": 21.197183098591548, "grad_norm": 1.0266106128692627, "learning_rate": 1.131e-05, "loss": 0.0519, "step": 3773 }, { "epoch": 21.20281690140845, "grad_norm": 0.5059301257133484, "learning_rate": 1.1313e-05, "loss": 0.059, "step": 3774 }, { "epoch": 21.208450704225353, "grad_norm": 0.5831635594367981, "learning_rate": 1.1316e-05, "loss": 0.0685, "step": 3775 }, { 
"epoch": 21.214084507042255, "grad_norm": 0.4996550381183624, "learning_rate": 1.1319000000000001e-05, "loss": 0.0454, "step": 3776 }, { "epoch": 21.219718309859154, "grad_norm": 0.5587782859802246, "learning_rate": 1.1322000000000001e-05, "loss": 0.0644, "step": 3777 }, { "epoch": 21.225352112676056, "grad_norm": 0.5173588395118713, "learning_rate": 1.1325e-05, "loss": 0.0574, "step": 3778 }, { "epoch": 21.23098591549296, "grad_norm": 0.9421080946922302, "learning_rate": 1.1328e-05, "loss": 0.0591, "step": 3779 }, { "epoch": 21.23661971830986, "grad_norm": 0.6491080522537231, "learning_rate": 1.1331e-05, "loss": 0.0709, "step": 3780 }, { "epoch": 21.24225352112676, "grad_norm": 0.6235758066177368, "learning_rate": 1.1334e-05, "loss": 0.0324, "step": 3781 }, { "epoch": 21.24788732394366, "grad_norm": 0.6971529722213745, "learning_rate": 1.1337e-05, "loss": 0.0397, "step": 3782 }, { "epoch": 21.253521126760564, "grad_norm": 0.7238476872444153, "learning_rate": 1.134e-05, "loss": 0.1885, "step": 3783 }, { "epoch": 21.259154929577466, "grad_norm": 0.8243281245231628, "learning_rate": 1.1343e-05, "loss": 0.2376, "step": 3784 }, { "epoch": 21.264788732394365, "grad_norm": 0.7654474973678589, "learning_rate": 1.1346e-05, "loss": 0.1844, "step": 3785 }, { "epoch": 21.270422535211267, "grad_norm": 1.2622910737991333, "learning_rate": 1.1349000000000001e-05, "loss": 0.2338, "step": 3786 }, { "epoch": 21.27605633802817, "grad_norm": 0.7098395824432373, "learning_rate": 1.1352e-05, "loss": 0.2072, "step": 3787 }, { "epoch": 21.281690140845072, "grad_norm": 0.8847341537475586, "learning_rate": 1.1355e-05, "loss": 0.2116, "step": 3788 }, { "epoch": 21.28732394366197, "grad_norm": 0.6732115745544434, "learning_rate": 1.1358e-05, "loss": 0.1664, "step": 3789 }, { "epoch": 21.292957746478873, "grad_norm": 0.742102861404419, "learning_rate": 1.1361e-05, "loss": 0.1371, "step": 3790 }, { "epoch": 21.298591549295775, "grad_norm": 0.8250037431716919, "learning_rate": 
1.1364000000000002e-05, "loss": 0.1995, "step": 3791 }, { "epoch": 21.304225352112677, "grad_norm": 0.6721233129501343, "learning_rate": 1.1367000000000001e-05, "loss": 0.1358, "step": 3792 }, { "epoch": 21.309859154929576, "grad_norm": 0.6768853068351746, "learning_rate": 1.137e-05, "loss": 0.1301, "step": 3793 }, { "epoch": 21.31549295774648, "grad_norm": 0.560245156288147, "learning_rate": 1.1373e-05, "loss": 0.112, "step": 3794 }, { "epoch": 21.32112676056338, "grad_norm": 0.6427155137062073, "learning_rate": 1.1376e-05, "loss": 0.1101, "step": 3795 }, { "epoch": 21.326760563380283, "grad_norm": 1.231948733329773, "learning_rate": 1.1379e-05, "loss": 0.1951, "step": 3796 }, { "epoch": 21.33239436619718, "grad_norm": 0.5751484036445618, "learning_rate": 1.1382e-05, "loss": 0.0912, "step": 3797 }, { "epoch": 21.338028169014084, "grad_norm": 1.1039903163909912, "learning_rate": 1.1385e-05, "loss": 0.0807, "step": 3798 }, { "epoch": 21.343661971830986, "grad_norm": 0.576042890548706, "learning_rate": 1.1388e-05, "loss": 0.0947, "step": 3799 }, { "epoch": 21.34929577464789, "grad_norm": 0.6838948726654053, "learning_rate": 1.1391e-05, "loss": 0.0975, "step": 3800 }, { "epoch": 21.354929577464787, "grad_norm": 1.0859383344650269, "learning_rate": 1.1394000000000001e-05, "loss": 0.1026, "step": 3801 }, { "epoch": 21.36056338028169, "grad_norm": 0.7961111664772034, "learning_rate": 1.1397000000000001e-05, "loss": 0.0955, "step": 3802 }, { "epoch": 21.366197183098592, "grad_norm": 0.5417793989181519, "learning_rate": 1.1400000000000001e-05, "loss": 0.0636, "step": 3803 }, { "epoch": 21.371830985915494, "grad_norm": 0.6017904877662659, "learning_rate": 1.1403e-05, "loss": 0.0799, "step": 3804 }, { "epoch": 21.377464788732393, "grad_norm": 2.828028440475464, "learning_rate": 1.1406e-05, "loss": 0.0869, "step": 3805 }, { "epoch": 21.383098591549295, "grad_norm": 0.7066461443901062, "learning_rate": 1.1409e-05, "loss": 0.0562, "step": 3806 }, { "epoch": 21.388732394366198, 
"grad_norm": 0.5311230421066284, "learning_rate": 1.1412e-05, "loss": 0.0866, "step": 3807 }, { "epoch": 21.3943661971831, "grad_norm": 0.7344611883163452, "learning_rate": 1.1415e-05, "loss": 0.0736, "step": 3808 }, { "epoch": 21.4, "grad_norm": 0.80890953540802, "learning_rate": 1.1418e-05, "loss": 0.1488, "step": 3809 }, { "epoch": 21.4056338028169, "grad_norm": 0.622484028339386, "learning_rate": 1.1421e-05, "loss": 0.0772, "step": 3810 }, { "epoch": 21.411267605633803, "grad_norm": 0.3973410725593567, "learning_rate": 1.1424000000000001e-05, "loss": 0.0328, "step": 3811 }, { "epoch": 21.416901408450705, "grad_norm": 0.6178673505783081, "learning_rate": 1.1427000000000001e-05, "loss": 0.0564, "step": 3812 }, { "epoch": 21.422535211267604, "grad_norm": 0.7607434391975403, "learning_rate": 1.143e-05, "loss": 0.0937, "step": 3813 }, { "epoch": 21.428169014084506, "grad_norm": 1.1158865690231323, "learning_rate": 1.1433e-05, "loss": 0.1265, "step": 3814 }, { "epoch": 21.43380281690141, "grad_norm": 0.5072949528694153, "learning_rate": 1.1436e-05, "loss": 0.0433, "step": 3815 }, { "epoch": 21.43943661971831, "grad_norm": 0.9031519889831543, "learning_rate": 1.1439e-05, "loss": 0.1006, "step": 3816 }, { "epoch": 21.44507042253521, "grad_norm": 0.49614837765693665, "learning_rate": 1.1442000000000002e-05, "loss": 0.0846, "step": 3817 }, { "epoch": 21.450704225352112, "grad_norm": 0.5385661125183105, "learning_rate": 1.1445e-05, "loss": 0.0406, "step": 3818 }, { "epoch": 21.456338028169014, "grad_norm": 0.48612165451049805, "learning_rate": 1.1448e-05, "loss": 0.0608, "step": 3819 }, { "epoch": 21.461971830985917, "grad_norm": 0.5629063248634338, "learning_rate": 1.1451e-05, "loss": 0.067, "step": 3820 }, { "epoch": 21.467605633802815, "grad_norm": 0.7193580865859985, "learning_rate": 1.1453999999999999e-05, "loss": 0.0433, "step": 3821 }, { "epoch": 21.473239436619718, "grad_norm": 1.0028976202011108, "learning_rate": 1.1457e-05, "loss": 0.1373, "step": 3822 }, { 
"epoch": 21.47887323943662, "grad_norm": 0.4897672235965729, "learning_rate": 1.146e-05, "loss": 0.0265, "step": 3823 }, { "epoch": 21.484507042253522, "grad_norm": 0.49326303601264954, "learning_rate": 1.1463e-05, "loss": 0.0504, "step": 3824 }, { "epoch": 21.49014084507042, "grad_norm": 1.877926230430603, "learning_rate": 1.1466e-05, "loss": 0.0628, "step": 3825 }, { "epoch": 21.495774647887323, "grad_norm": 0.6297469735145569, "learning_rate": 1.1469e-05, "loss": 0.0447, "step": 3826 }, { "epoch": 21.501408450704226, "grad_norm": 1.2586532831192017, "learning_rate": 1.1472000000000001e-05, "loss": 0.3118, "step": 3827 }, { "epoch": 21.507042253521128, "grad_norm": 0.8124749660491943, "learning_rate": 1.1475000000000001e-05, "loss": 0.1711, "step": 3828 }, { "epoch": 21.512676056338027, "grad_norm": 0.8707528710365295, "learning_rate": 1.1478000000000001e-05, "loss": 0.1635, "step": 3829 }, { "epoch": 21.51830985915493, "grad_norm": 0.8288672566413879, "learning_rate": 1.1480999999999999e-05, "loss": 0.1627, "step": 3830 }, { "epoch": 21.52394366197183, "grad_norm": 0.6908599734306335, "learning_rate": 1.1483999999999999e-05, "loss": 0.1411, "step": 3831 }, { "epoch": 21.529577464788733, "grad_norm": 0.7184497714042664, "learning_rate": 1.1487e-05, "loss": 0.1717, "step": 3832 }, { "epoch": 21.535211267605632, "grad_norm": 0.7284225225448608, "learning_rate": 1.149e-05, "loss": 0.1792, "step": 3833 }, { "epoch": 21.540845070422534, "grad_norm": 0.7330681681632996, "learning_rate": 1.1493e-05, "loss": 0.1274, "step": 3834 }, { "epoch": 21.546478873239437, "grad_norm": 0.7628039121627808, "learning_rate": 1.1496e-05, "loss": 0.1945, "step": 3835 }, { "epoch": 21.55211267605634, "grad_norm": 0.9623438119888306, "learning_rate": 1.1499e-05, "loss": 0.1477, "step": 3836 }, { "epoch": 21.557746478873238, "grad_norm": 0.6079851984977722, "learning_rate": 1.1502000000000001e-05, "loss": 0.1407, "step": 3837 }, { "epoch": 21.56338028169014, "grad_norm": 0.608674943447113, 
"learning_rate": 1.1505e-05, "loss": 0.1171, "step": 3838 }, { "epoch": 21.569014084507042, "grad_norm": 0.9104487299919128, "learning_rate": 1.1508e-05, "loss": 0.1707, "step": 3839 }, { "epoch": 21.574647887323945, "grad_norm": 0.6092275977134705, "learning_rate": 1.1511e-05, "loss": 0.1066, "step": 3840 }, { "epoch": 21.580281690140843, "grad_norm": 0.5827267169952393, "learning_rate": 1.1514e-05, "loss": 0.1118, "step": 3841 }, { "epoch": 21.585915492957746, "grad_norm": 0.7732173800468445, "learning_rate": 1.1517e-05, "loss": 0.1633, "step": 3842 }, { "epoch": 21.591549295774648, "grad_norm": 0.7264963388442993, "learning_rate": 1.152e-05, "loss": 0.0845, "step": 3843 }, { "epoch": 21.59718309859155, "grad_norm": 0.5513004660606384, "learning_rate": 1.1523e-05, "loss": 0.0696, "step": 3844 }, { "epoch": 21.60281690140845, "grad_norm": 1.0525224208831787, "learning_rate": 1.1526e-05, "loss": 0.1323, "step": 3845 }, { "epoch": 21.60845070422535, "grad_norm": 0.5908690094947815, "learning_rate": 1.1529e-05, "loss": 0.077, "step": 3846 }, { "epoch": 21.614084507042254, "grad_norm": 1.1715190410614014, "learning_rate": 1.1532e-05, "loss": 0.1174, "step": 3847 }, { "epoch": 21.619718309859156, "grad_norm": 1.170559287071228, "learning_rate": 1.1535e-05, "loss": 0.0984, "step": 3848 }, { "epoch": 21.625352112676055, "grad_norm": 0.6217211484909058, "learning_rate": 1.1538e-05, "loss": 0.0746, "step": 3849 }, { "epoch": 21.630985915492957, "grad_norm": 1.0770761966705322, "learning_rate": 1.1541e-05, "loss": 0.0925, "step": 3850 }, { "epoch": 21.63661971830986, "grad_norm": 0.6646056175231934, "learning_rate": 1.1544e-05, "loss": 0.0785, "step": 3851 }, { "epoch": 21.64225352112676, "grad_norm": 0.6238104701042175, "learning_rate": 1.1547000000000001e-05, "loss": 0.0878, "step": 3852 }, { "epoch": 21.647887323943664, "grad_norm": 0.627152681350708, "learning_rate": 1.1550000000000001e-05, "loss": 0.0641, "step": 3853 }, { "epoch": 21.653521126760563, "grad_norm": 
0.5339759588241577, "learning_rate": 1.1553000000000001e-05, "loss": 0.0397, "step": 3854 }, { "epoch": 21.659154929577465, "grad_norm": 0.4468303918838501, "learning_rate": 1.1555999999999999e-05, "loss": 0.0446, "step": 3855 }, { "epoch": 21.664788732394367, "grad_norm": 0.5039489269256592, "learning_rate": 1.1558999999999999e-05, "loss": 0.0759, "step": 3856 }, { "epoch": 21.670422535211266, "grad_norm": 0.5679898262023926, "learning_rate": 1.1562e-05, "loss": 0.0582, "step": 3857 }, { "epoch": 21.676056338028168, "grad_norm": 0.6349469423294067, "learning_rate": 1.1565e-05, "loss": 0.0538, "step": 3858 }, { "epoch": 21.68169014084507, "grad_norm": 0.39095911383628845, "learning_rate": 1.1568e-05, "loss": 0.0425, "step": 3859 }, { "epoch": 21.687323943661973, "grad_norm": 0.5567134618759155, "learning_rate": 1.1571e-05, "loss": 0.1036, "step": 3860 }, { "epoch": 21.692957746478875, "grad_norm": 0.9637419581413269, "learning_rate": 1.1574e-05, "loss": 0.0604, "step": 3861 }, { "epoch": 21.698591549295774, "grad_norm": 1.057702660560608, "learning_rate": 1.1577000000000001e-05, "loss": 0.1083, "step": 3862 }, { "epoch": 21.704225352112676, "grad_norm": 0.686822772026062, "learning_rate": 1.1580000000000001e-05, "loss": 0.0945, "step": 3863 }, { "epoch": 21.70985915492958, "grad_norm": 0.483414888381958, "learning_rate": 1.1583e-05, "loss": 0.0411, "step": 3864 }, { "epoch": 21.71549295774648, "grad_norm": 0.7184618711471558, "learning_rate": 1.1586e-05, "loss": 0.0636, "step": 3865 }, { "epoch": 21.72112676056338, "grad_norm": 0.6953598856925964, "learning_rate": 1.1589e-05, "loss": 0.087, "step": 3866 }, { "epoch": 21.72676056338028, "grad_norm": 0.6672613620758057, "learning_rate": 1.1592e-05, "loss": 0.0344, "step": 3867 }, { "epoch": 21.732394366197184, "grad_norm": 0.3723028600215912, "learning_rate": 1.1595e-05, "loss": 0.0604, "step": 3868 }, { "epoch": 21.738028169014086, "grad_norm": 1.0172349214553833, "learning_rate": 1.1598e-05, "loss": 0.0393, "step": 
3869 }, { "epoch": 21.743661971830985, "grad_norm": 1.280462384223938, "learning_rate": 1.1601e-05, "loss": 0.1103, "step": 3870 }, { "epoch": 21.749295774647887, "grad_norm": 1.3911575078964233, "learning_rate": 1.1604e-05, "loss": 0.1975, "step": 3871 }, { "epoch": 21.75492957746479, "grad_norm": 0.7050368189811707, "learning_rate": 1.1607000000000001e-05, "loss": 0.1636, "step": 3872 }, { "epoch": 21.760563380281692, "grad_norm": 0.7532088756561279, "learning_rate": 1.161e-05, "loss": 0.1601, "step": 3873 }, { "epoch": 21.76619718309859, "grad_norm": 0.6457635164260864, "learning_rate": 1.1613e-05, "loss": 0.1725, "step": 3874 }, { "epoch": 21.771830985915493, "grad_norm": 0.9462829232215881, "learning_rate": 1.1616e-05, "loss": 0.1649, "step": 3875 }, { "epoch": 21.777464788732395, "grad_norm": 0.7943126559257507, "learning_rate": 1.1619e-05, "loss": 0.1442, "step": 3876 }, { "epoch": 21.783098591549297, "grad_norm": 1.0351535081863403, "learning_rate": 1.1622000000000002e-05, "loss": 0.1829, "step": 3877 }, { "epoch": 21.788732394366196, "grad_norm": 0.8526789546012878, "learning_rate": 1.1625000000000001e-05, "loss": 0.1391, "step": 3878 }, { "epoch": 21.7943661971831, "grad_norm": 0.6768996119499207, "learning_rate": 1.1628e-05, "loss": 0.1192, "step": 3879 }, { "epoch": 21.8, "grad_norm": 0.5898879170417786, "learning_rate": 1.1631e-05, "loss": 0.095, "step": 3880 }, { "epoch": 21.805633802816903, "grad_norm": 0.7711986899375916, "learning_rate": 1.1633999999999999e-05, "loss": 0.1469, "step": 3881 }, { "epoch": 21.8112676056338, "grad_norm": 0.6065319180488586, "learning_rate": 1.1637e-05, "loss": 0.1039, "step": 3882 }, { "epoch": 21.816901408450704, "grad_norm": 0.6933140754699707, "learning_rate": 1.164e-05, "loss": 0.134, "step": 3883 }, { "epoch": 21.822535211267606, "grad_norm": 0.5252668857574463, "learning_rate": 1.1643e-05, "loss": 0.1171, "step": 3884 }, { "epoch": 21.82816901408451, "grad_norm": 0.7086127996444702, "learning_rate": 1.1646e-05, 
"loss": 0.0911, "step": 3885 }, { "epoch": 21.833802816901407, "grad_norm": 1.0139069557189941, "learning_rate": 1.1649e-05, "loss": 0.0886, "step": 3886 }, { "epoch": 21.83943661971831, "grad_norm": 0.6582956910133362, "learning_rate": 1.1652000000000001e-05, "loss": 0.0956, "step": 3887 }, { "epoch": 21.845070422535212, "grad_norm": 0.5482489466667175, "learning_rate": 1.1655000000000001e-05, "loss": 0.0875, "step": 3888 }, { "epoch": 21.850704225352114, "grad_norm": 0.63651043176651, "learning_rate": 1.1658000000000001e-05, "loss": 0.0835, "step": 3889 }, { "epoch": 21.856338028169013, "grad_norm": 0.501788854598999, "learning_rate": 1.1661e-05, "loss": 0.0665, "step": 3890 }, { "epoch": 21.861971830985915, "grad_norm": 1.6913992166519165, "learning_rate": 1.1664e-05, "loss": 0.1508, "step": 3891 }, { "epoch": 21.867605633802818, "grad_norm": 0.6015134453773499, "learning_rate": 1.1667e-05, "loss": 0.1096, "step": 3892 }, { "epoch": 21.87323943661972, "grad_norm": 0.9991927742958069, "learning_rate": 1.167e-05, "loss": 0.1113, "step": 3893 }, { "epoch": 21.87887323943662, "grad_norm": 0.7639583349227905, "learning_rate": 1.1673e-05, "loss": 0.0677, "step": 3894 }, { "epoch": 21.88450704225352, "grad_norm": 0.520096480846405, "learning_rate": 1.1676e-05, "loss": 0.0563, "step": 3895 }, { "epoch": 21.890140845070423, "grad_norm": 0.6350600719451904, "learning_rate": 1.1679e-05, "loss": 0.076, "step": 3896 }, { "epoch": 21.895774647887325, "grad_norm": 1.3344407081604004, "learning_rate": 1.1682000000000001e-05, "loss": 0.0712, "step": 3897 }, { "epoch": 21.901408450704224, "grad_norm": 0.7441964745521545, "learning_rate": 1.1685e-05, "loss": 0.0779, "step": 3898 }, { "epoch": 21.907042253521126, "grad_norm": 0.8134579658508301, "learning_rate": 1.1688e-05, "loss": 0.0579, "step": 3899 }, { "epoch": 21.91267605633803, "grad_norm": 0.5868893265724182, "learning_rate": 1.1691e-05, "loss": 0.0989, "step": 3900 }, { "epoch": 21.91830985915493, "grad_norm": 
0.6715525388717651, "learning_rate": 1.1694e-05, "loss": 0.0635, "step": 3901 }, { "epoch": 21.92394366197183, "grad_norm": 0.7406333088874817, "learning_rate": 1.1697000000000002e-05, "loss": 0.0538, "step": 3902 }, { "epoch": 21.929577464788732, "grad_norm": 2.6579840183258057, "learning_rate": 1.1700000000000001e-05, "loss": 0.1144, "step": 3903 }, { "epoch": 21.935211267605634, "grad_norm": 0.43247586488723755, "learning_rate": 1.1703e-05, "loss": 0.0776, "step": 3904 }, { "epoch": 21.940845070422537, "grad_norm": 0.6085285544395447, "learning_rate": 1.1706e-05, "loss": 0.0461, "step": 3905 }, { "epoch": 21.946478873239435, "grad_norm": 0.7322105765342712, "learning_rate": 1.1709e-05, "loss": 0.1046, "step": 3906 }, { "epoch": 21.952112676056338, "grad_norm": 1.1375722885131836, "learning_rate": 1.1712e-05, "loss": 0.083, "step": 3907 }, { "epoch": 21.95774647887324, "grad_norm": 0.6160619854927063, "learning_rate": 1.1715e-05, "loss": 0.0405, "step": 3908 }, { "epoch": 21.963380281690142, "grad_norm": 0.6196191310882568, "learning_rate": 1.1718e-05, "loss": 0.0738, "step": 3909 }, { "epoch": 21.96901408450704, "grad_norm": 0.5900743007659912, "learning_rate": 1.1721e-05, "loss": 0.0289, "step": 3910 }, { "epoch": 21.974647887323943, "grad_norm": 0.5745940804481506, "learning_rate": 1.1724e-05, "loss": 0.0531, "step": 3911 }, { "epoch": 21.980281690140846, "grad_norm": 0.5559342503547668, "learning_rate": 1.1727000000000001e-05, "loss": 0.0856, "step": 3912 }, { "epoch": 21.985915492957748, "grad_norm": 0.7279943227767944, "learning_rate": 1.1730000000000001e-05, "loss": 0.0951, "step": 3913 }, { "epoch": 21.991549295774647, "grad_norm": 0.6898196339607239, "learning_rate": 1.1733000000000001e-05, "loss": 0.0369, "step": 3914 }, { "epoch": 21.99718309859155, "grad_norm": 0.849134624004364, "learning_rate": 1.1736e-05, "loss": 0.1046, "step": 3915 }, { "epoch": 22.0, "grad_norm": 0.84903883934021, "learning_rate": 1.1738999999999999e-05, "loss": 0.0362, "step": 
3916 }, { "epoch": 22.005633802816902, "grad_norm": 0.8349356055259705, "learning_rate": 1.1742e-05, "loss": 0.1796, "step": 3917 }, { "epoch": 22.011267605633805, "grad_norm": 0.9057911038398743, "learning_rate": 1.1745e-05, "loss": 0.1711, "step": 3918 }, { "epoch": 22.016901408450703, "grad_norm": 0.6767787933349609, "learning_rate": 1.1748e-05, "loss": 0.1422, "step": 3919 }, { "epoch": 22.022535211267606, "grad_norm": 0.6853015422821045, "learning_rate": 1.1751e-05, "loss": 0.1384, "step": 3920 }, { "epoch": 22.028169014084508, "grad_norm": 0.7287697196006775, "learning_rate": 1.1754e-05, "loss": 0.1286, "step": 3921 }, { "epoch": 22.03380281690141, "grad_norm": 0.7060514688491821, "learning_rate": 1.1757000000000001e-05, "loss": 0.1411, "step": 3922 }, { "epoch": 22.03943661971831, "grad_norm": 0.6600307822227478, "learning_rate": 1.1760000000000001e-05, "loss": 0.1159, "step": 3923 }, { "epoch": 22.04507042253521, "grad_norm": 1.2075175046920776, "learning_rate": 1.1763e-05, "loss": 0.1806, "step": 3924 }, { "epoch": 22.050704225352113, "grad_norm": 0.6061028242111206, "learning_rate": 1.1766e-05, "loss": 0.0996, "step": 3925 }, { "epoch": 22.056338028169016, "grad_norm": 0.6363815069198608, "learning_rate": 1.1769e-05, "loss": 0.121, "step": 3926 }, { "epoch": 22.061971830985915, "grad_norm": 0.6133226752281189, "learning_rate": 1.1772000000000002e-05, "loss": 0.118, "step": 3927 }, { "epoch": 22.067605633802817, "grad_norm": 0.6295463442802429, "learning_rate": 1.1775000000000002e-05, "loss": 0.1107, "step": 3928 }, { "epoch": 22.07323943661972, "grad_norm": 0.7518165111541748, "learning_rate": 1.1778e-05, "loss": 0.091, "step": 3929 }, { "epoch": 22.07887323943662, "grad_norm": 0.520511269569397, "learning_rate": 1.1781e-05, "loss": 0.1133, "step": 3930 }, { "epoch": 22.08450704225352, "grad_norm": 0.5835321545600891, "learning_rate": 1.1784e-05, "loss": 0.0907, "step": 3931 }, { "epoch": 22.090140845070422, "grad_norm": 0.6984267830848694, 
"learning_rate": 1.1787e-05, "loss": 0.1372, "step": 3932 }, { "epoch": 22.095774647887325, "grad_norm": 0.7085666060447693, "learning_rate": 1.179e-05, "loss": 0.0869, "step": 3933 }, { "epoch": 22.101408450704227, "grad_norm": 0.7139620184898376, "learning_rate": 1.1793e-05, "loss": 0.0873, "step": 3934 }, { "epoch": 22.107042253521126, "grad_norm": 1.1236885786056519, "learning_rate": 1.1796e-05, "loss": 0.1031, "step": 3935 }, { "epoch": 22.112676056338028, "grad_norm": 0.6956385374069214, "learning_rate": 1.1799e-05, "loss": 0.0753, "step": 3936 }, { "epoch": 22.11830985915493, "grad_norm": 0.7081955075263977, "learning_rate": 1.1802000000000002e-05, "loss": 0.1269, "step": 3937 }, { "epoch": 22.123943661971833, "grad_norm": 0.5294609069824219, "learning_rate": 1.1805000000000001e-05, "loss": 0.0477, "step": 3938 }, { "epoch": 22.12957746478873, "grad_norm": 0.49267539381980896, "learning_rate": 1.1808000000000001e-05, "loss": 0.0708, "step": 3939 }, { "epoch": 22.135211267605634, "grad_norm": 0.9730096459388733, "learning_rate": 1.1811000000000001e-05, "loss": 0.0689, "step": 3940 }, { "epoch": 22.140845070422536, "grad_norm": 0.6945432424545288, "learning_rate": 1.1813999999999999e-05, "loss": 0.0924, "step": 3941 }, { "epoch": 22.146478873239438, "grad_norm": 0.5733911991119385, "learning_rate": 1.1816999999999999e-05, "loss": 0.0747, "step": 3942 }, { "epoch": 22.152112676056337, "grad_norm": 0.7317754030227661, "learning_rate": 1.182e-05, "loss": 0.0931, "step": 3943 }, { "epoch": 22.15774647887324, "grad_norm": 0.8333809971809387, "learning_rate": 1.1823e-05, "loss": 0.0662, "step": 3944 }, { "epoch": 22.16338028169014, "grad_norm": 0.535469114780426, "learning_rate": 1.1826e-05, "loss": 0.0533, "step": 3945 }, { "epoch": 22.169014084507044, "grad_norm": 0.567329466342926, "learning_rate": 1.1829e-05, "loss": 0.0571, "step": 3946 }, { "epoch": 22.174647887323943, "grad_norm": 0.5132856369018555, "learning_rate": 1.1832e-05, "loss": 0.0461, "step": 3947 
}, { "epoch": 22.180281690140845, "grad_norm": 0.7198354005813599, "learning_rate": 1.1835000000000001e-05, "loss": 0.0501, "step": 3948 }, { "epoch": 22.185915492957747, "grad_norm": 0.4497773349285126, "learning_rate": 1.1838e-05, "loss": 0.0506, "step": 3949 }, { "epoch": 22.19154929577465, "grad_norm": 0.8431933522224426, "learning_rate": 1.1841e-05, "loss": 0.0526, "step": 3950 }, { "epoch": 22.197183098591548, "grad_norm": 0.5540543794631958, "learning_rate": 1.1844e-05, "loss": 0.0918, "step": 3951 }, { "epoch": 22.20281690140845, "grad_norm": 0.768166720867157, "learning_rate": 1.1847e-05, "loss": 0.071, "step": 3952 }, { "epoch": 22.208450704225353, "grad_norm": 0.7997515797615051, "learning_rate": 1.185e-05, "loss": 0.0911, "step": 3953 }, { "epoch": 22.214084507042255, "grad_norm": 0.40957382321357727, "learning_rate": 1.1853e-05, "loss": 0.0595, "step": 3954 }, { "epoch": 22.219718309859154, "grad_norm": 0.6145085096359253, "learning_rate": 1.1856e-05, "loss": 0.0525, "step": 3955 }, { "epoch": 22.225352112676056, "grad_norm": 0.5681316256523132, "learning_rate": 1.1859e-05, "loss": 0.0682, "step": 3956 }, { "epoch": 22.23098591549296, "grad_norm": 1.2168115377426147, "learning_rate": 1.1862e-05, "loss": 0.0436, "step": 3957 }, { "epoch": 22.23661971830986, "grad_norm": 0.367465615272522, "learning_rate": 1.1865e-05, "loss": 0.03, "step": 3958 }, { "epoch": 22.24225352112676, "grad_norm": 1.182226538658142, "learning_rate": 1.1868e-05, "loss": 0.1065, "step": 3959 }, { "epoch": 22.24788732394366, "grad_norm": 0.7206059694290161, "learning_rate": 1.1871e-05, "loss": 0.066, "step": 3960 }, { "epoch": 22.253521126760564, "grad_norm": 0.688542902469635, "learning_rate": 1.1874e-05, "loss": 0.2208, "step": 3961 }, { "epoch": 22.259154929577466, "grad_norm": 0.6734368205070496, "learning_rate": 1.1877e-05, "loss": 0.2129, "step": 3962 }, { "epoch": 22.264788732394365, "grad_norm": 0.7407608032226562, "learning_rate": 1.1880000000000001e-05, "loss": 0.1697, 
"step": 3963 }, { "epoch": 22.270422535211267, "grad_norm": 0.7899647951126099, "learning_rate": 1.1883000000000001e-05, "loss": 0.181, "step": 3964 }, { "epoch": 22.27605633802817, "grad_norm": 0.6320874691009521, "learning_rate": 1.1886e-05, "loss": 0.2049, "step": 3965 }, { "epoch": 22.281690140845072, "grad_norm": 1.1420775651931763, "learning_rate": 1.1889e-05, "loss": 0.1931, "step": 3966 }, { "epoch": 22.28732394366197, "grad_norm": 0.6930124759674072, "learning_rate": 1.1891999999999999e-05, "loss": 0.1826, "step": 3967 }, { "epoch": 22.292957746478873, "grad_norm": 0.5907472968101501, "learning_rate": 1.1895e-05, "loss": 0.1035, "step": 3968 }, { "epoch": 22.298591549295775, "grad_norm": 0.8170567750930786, "learning_rate": 1.1898e-05, "loss": 0.1437, "step": 3969 }, { "epoch": 22.304225352112677, "grad_norm": 0.6437286734580994, "learning_rate": 1.1901e-05, "loss": 0.0988, "step": 3970 }, { "epoch": 22.309859154929576, "grad_norm": 0.6603325605392456, "learning_rate": 1.1904e-05, "loss": 0.1455, "step": 3971 }, { "epoch": 22.31549295774648, "grad_norm": 0.704154908657074, "learning_rate": 1.1907e-05, "loss": 0.1198, "step": 3972 }, { "epoch": 22.32112676056338, "grad_norm": 0.8062414526939392, "learning_rate": 1.1910000000000001e-05, "loss": 0.1989, "step": 3973 }, { "epoch": 22.326760563380283, "grad_norm": 0.5955081582069397, "learning_rate": 1.1913000000000001e-05, "loss": 0.0608, "step": 3974 }, { "epoch": 22.33239436619718, "grad_norm": 0.7071753144264221, "learning_rate": 1.1916e-05, "loss": 0.14, "step": 3975 }, { "epoch": 22.338028169014084, "grad_norm": 0.5541832447052002, "learning_rate": 1.1919e-05, "loss": 0.128, "step": 3976 }, { "epoch": 22.343661971830986, "grad_norm": 0.6838138103485107, "learning_rate": 1.1922e-05, "loss": 0.0868, "step": 3977 }, { "epoch": 22.34929577464789, "grad_norm": 0.6479970812797546, "learning_rate": 1.1925e-05, "loss": 0.1008, "step": 3978 }, { "epoch": 22.354929577464787, "grad_norm": 0.6261606216430664, 
"learning_rate": 1.1928e-05, "loss": 0.0966, "step": 3979 }, { "epoch": 22.36056338028169, "grad_norm": 0.5135319828987122, "learning_rate": 1.1931e-05, "loss": 0.0719, "step": 3980 }, { "epoch": 22.366197183098592, "grad_norm": 0.7995685935020447, "learning_rate": 1.1934e-05, "loss": 0.1118, "step": 3981 }, { "epoch": 22.371830985915494, "grad_norm": 0.65648353099823, "learning_rate": 1.1937e-05, "loss": 0.0833, "step": 3982 }, { "epoch": 22.377464788732393, "grad_norm": 0.5095494389533997, "learning_rate": 1.1940000000000001e-05, "loss": 0.0798, "step": 3983 }, { "epoch": 22.383098591549295, "grad_norm": 0.684410572052002, "learning_rate": 1.1943e-05, "loss": 0.0671, "step": 3984 }, { "epoch": 22.388732394366198, "grad_norm": 0.797908365726471, "learning_rate": 1.1946e-05, "loss": 0.0541, "step": 3985 }, { "epoch": 22.3943661971831, "grad_norm": 0.44620954990386963, "learning_rate": 1.1949e-05, "loss": 0.0534, "step": 3986 }, { "epoch": 22.4, "grad_norm": 0.6514051556587219, "learning_rate": 1.1952e-05, "loss": 0.0617, "step": 3987 }, { "epoch": 22.4056338028169, "grad_norm": 0.39281201362609863, "learning_rate": 1.1955000000000002e-05, "loss": 0.0363, "step": 3988 }, { "epoch": 22.411267605633803, "grad_norm": 0.5868716835975647, "learning_rate": 1.1958000000000001e-05, "loss": 0.0694, "step": 3989 }, { "epoch": 22.416901408450705, "grad_norm": 0.5259824991226196, "learning_rate": 1.1961e-05, "loss": 0.1118, "step": 3990 }, { "epoch": 22.422535211267604, "grad_norm": 0.5172989368438721, "learning_rate": 1.1964e-05, "loss": 0.038, "step": 3991 }, { "epoch": 22.428169014084506, "grad_norm": 1.0657265186309814, "learning_rate": 1.1966999999999999e-05, "loss": 0.1647, "step": 3992 }, { "epoch": 22.43380281690141, "grad_norm": 0.44234827160835266, "learning_rate": 1.197e-05, "loss": 0.0343, "step": 3993 }, { "epoch": 22.43943661971831, "grad_norm": 0.5915202498435974, "learning_rate": 1.1973e-05, "loss": 0.0523, "step": 3994 }, { "epoch": 22.44507042253521, 
"grad_norm": 0.505346953868866, "learning_rate": 1.1976e-05, "loss": 0.0516, "step": 3995 }, { "epoch": 22.450704225352112, "grad_norm": 0.8012747168540955, "learning_rate": 1.1979e-05, "loss": 0.0567, "step": 3996 }, { "epoch": 22.456338028169014, "grad_norm": 0.603287935256958, "learning_rate": 1.1982e-05, "loss": 0.0545, "step": 3997 }, { "epoch": 22.461971830985917, "grad_norm": 0.5426540374755859, "learning_rate": 1.1985000000000001e-05, "loss": 0.0533, "step": 3998 }, { "epoch": 22.467605633802815, "grad_norm": 0.6642106771469116, "learning_rate": 1.1988000000000001e-05, "loss": 0.0504, "step": 3999 }, { "epoch": 22.473239436619718, "grad_norm": 0.4482876658439636, "learning_rate": 1.1991000000000001e-05, "loss": 0.0417, "step": 4000 }, { "epoch": 22.473239436619718, "eval_cer": 0.11486118671747414, "eval_loss": 0.3389103412628174, "eval_runtime": 16.3892, "eval_samples_per_second": 18.549, "eval_steps_per_second": 0.61, "eval_wer": 0.40464313123561013, "step": 4000 }, { "epoch": 22.47887323943662, "grad_norm": 0.6729024052619934, "learning_rate": 1.1994e-05, "loss": 0.0345, "step": 4001 }, { "epoch": 22.484507042253522, "grad_norm": 0.7761999368667603, "learning_rate": 1.1996999999999999e-05, "loss": 0.0487, "step": 4002 }, { "epoch": 22.49014084507042, "grad_norm": 0.546999990940094, "learning_rate": 1.2e-05, "loss": 0.0641, "step": 4003 }, { "epoch": 22.495774647887323, "grad_norm": 0.5976518988609314, "learning_rate": 1.2003e-05, "loss": 0.0687, "step": 4004 }, { "epoch": 22.501408450704226, "grad_norm": 0.7777948975563049, "learning_rate": 1.2006e-05, "loss": 0.2337, "step": 4005 }, { "epoch": 22.507042253521128, "grad_norm": 0.7198633551597595, "learning_rate": 1.2009e-05, "loss": 0.1512, "step": 4006 }, { "epoch": 22.512676056338027, "grad_norm": 0.6786754131317139, "learning_rate": 1.2012e-05, "loss": 0.1435, "step": 4007 }, { "epoch": 22.51830985915493, "grad_norm": 0.817950963973999, "learning_rate": 1.2015000000000001e-05, "loss": 0.2059, "step": 
4008 }, { "epoch": 22.52394366197183, "grad_norm": 0.7130279541015625, "learning_rate": 1.2018e-05, "loss": 0.1626, "step": 4009 }, { "epoch": 22.529577464788733, "grad_norm": 0.8479874134063721, "learning_rate": 1.2021e-05, "loss": 0.1306, "step": 4010 }, { "epoch": 22.535211267605632, "grad_norm": 0.6765828132629395, "learning_rate": 1.2024e-05, "loss": 0.1142, "step": 4011 }, { "epoch": 22.540845070422534, "grad_norm": 0.7167558073997498, "learning_rate": 1.2027e-05, "loss": 0.1407, "step": 4012 }, { "epoch": 22.546478873239437, "grad_norm": 0.7593284845352173, "learning_rate": 1.2030000000000002e-05, "loss": 0.147, "step": 4013 }, { "epoch": 22.55211267605634, "grad_norm": 0.6791000962257385, "learning_rate": 1.2033000000000002e-05, "loss": 0.135, "step": 4014 }, { "epoch": 22.557746478873238, "grad_norm": 0.8383356332778931, "learning_rate": 1.2036e-05, "loss": 0.1507, "step": 4015 }, { "epoch": 22.56338028169014, "grad_norm": 0.5727760195732117, "learning_rate": 1.2039e-05, "loss": 0.1081, "step": 4016 }, { "epoch": 22.569014084507042, "grad_norm": 0.6725924611091614, "learning_rate": 1.2042e-05, "loss": 0.1054, "step": 4017 }, { "epoch": 22.574647887323945, "grad_norm": 1.1630315780639648, "learning_rate": 1.2045e-05, "loss": 0.136, "step": 4018 }, { "epoch": 22.580281690140843, "grad_norm": 0.5956259965896606, "learning_rate": 1.2048e-05, "loss": 0.1095, "step": 4019 }, { "epoch": 22.585915492957746, "grad_norm": 0.6707935333251953, "learning_rate": 1.2051e-05, "loss": 0.0804, "step": 4020 }, { "epoch": 22.591549295774648, "grad_norm": 0.5328925848007202, "learning_rate": 1.2054e-05, "loss": 0.0788, "step": 4021 }, { "epoch": 22.59718309859155, "grad_norm": 0.57720547914505, "learning_rate": 1.2057e-05, "loss": 0.078, "step": 4022 }, { "epoch": 22.60281690140845, "grad_norm": 0.6690756678581238, "learning_rate": 1.2060000000000001e-05, "loss": 0.0969, "step": 4023 }, { "epoch": 22.60845070422535, "grad_norm": 0.4485354721546173, "learning_rate": 
1.2063000000000001e-05, "loss": 0.0737, "step": 4024 }, { "epoch": 22.614084507042254, "grad_norm": 0.5178638100624084, "learning_rate": 1.2066000000000001e-05, "loss": 0.0815, "step": 4025 }, { "epoch": 22.619718309859156, "grad_norm": 0.63835209608078, "learning_rate": 1.2069e-05, "loss": 0.1031, "step": 4026 }, { "epoch": 22.625352112676055, "grad_norm": 0.5076093673706055, "learning_rate": 1.2071999999999999e-05, "loss": 0.1195, "step": 4027 }, { "epoch": 22.630985915492957, "grad_norm": 0.516040563583374, "learning_rate": 1.2075e-05, "loss": 0.0759, "step": 4028 }, { "epoch": 22.63661971830986, "grad_norm": 0.5003854632377625, "learning_rate": 1.2078e-05, "loss": 0.058, "step": 4029 }, { "epoch": 22.64225352112676, "grad_norm": 0.5678958892822266, "learning_rate": 1.2081e-05, "loss": 0.0586, "step": 4030 }, { "epoch": 22.647887323943664, "grad_norm": 0.7465324401855469, "learning_rate": 1.2084e-05, "loss": 0.1395, "step": 4031 }, { "epoch": 22.653521126760563, "grad_norm": 0.5314163565635681, "learning_rate": 1.2087e-05, "loss": 0.0702, "step": 4032 }, { "epoch": 22.659154929577465, "grad_norm": 0.5303806066513062, "learning_rate": 1.2090000000000001e-05, "loss": 0.0469, "step": 4033 }, { "epoch": 22.664788732394367, "grad_norm": 0.7228844165802002, "learning_rate": 1.2093000000000001e-05, "loss": 0.0941, "step": 4034 }, { "epoch": 22.670422535211266, "grad_norm": 0.6143317222595215, "learning_rate": 1.2096e-05, "loss": 0.0542, "step": 4035 }, { "epoch": 22.676056338028168, "grad_norm": 0.9915088415145874, "learning_rate": 1.2099e-05, "loss": 0.051, "step": 4036 }, { "epoch": 22.68169014084507, "grad_norm": 0.5102118849754333, "learning_rate": 1.2102e-05, "loss": 0.084, "step": 4037 }, { "epoch": 22.687323943661973, "grad_norm": 1.0550858974456787, "learning_rate": 1.2105000000000002e-05, "loss": 0.1684, "step": 4038 }, { "epoch": 22.692957746478875, "grad_norm": 0.5627277493476868, "learning_rate": 1.2108e-05, "loss": 0.0431, "step": 4039 }, { "epoch": 
22.698591549295774, "grad_norm": 0.7350191473960876, "learning_rate": 1.2111e-05, "loss": 0.0793, "step": 4040 }, { "epoch": 22.704225352112676, "grad_norm": 0.4741433262825012, "learning_rate": 1.2114e-05, "loss": 0.0474, "step": 4041 }, { "epoch": 22.70985915492958, "grad_norm": 1.2094430923461914, "learning_rate": 1.2117e-05, "loss": 0.069, "step": 4042 }, { "epoch": 22.71549295774648, "grad_norm": 0.5451456904411316, "learning_rate": 1.2120000000000001e-05, "loss": 0.0416, "step": 4043 }, { "epoch": 22.72112676056338, "grad_norm": 3.0460290908813477, "learning_rate": 1.2123e-05, "loss": 0.1021, "step": 4044 }, { "epoch": 22.72676056338028, "grad_norm": 0.6162716150283813, "learning_rate": 1.2126e-05, "loss": 0.0374, "step": 4045 }, { "epoch": 22.732394366197184, "grad_norm": 1.0042173862457275, "learning_rate": 1.2129e-05, "loss": 0.1079, "step": 4046 }, { "epoch": 22.738028169014086, "grad_norm": 0.999747097492218, "learning_rate": 1.2132e-05, "loss": 0.0389, "step": 4047 }, { "epoch": 22.743661971830985, "grad_norm": 0.6431294679641724, "learning_rate": 1.2135000000000002e-05, "loss": 0.0272, "step": 4048 }, { "epoch": 22.749295774647887, "grad_norm": 0.8340325951576233, "learning_rate": 1.2138000000000001e-05, "loss": 0.2123, "step": 4049 }, { "epoch": 22.75492957746479, "grad_norm": 0.775688886642456, "learning_rate": 1.2141000000000001e-05, "loss": 0.1858, "step": 4050 }, { "epoch": 22.760563380281692, "grad_norm": 0.7551960349082947, "learning_rate": 1.2144e-05, "loss": 0.1907, "step": 4051 }, { "epoch": 22.76619718309859, "grad_norm": 0.7959131598472595, "learning_rate": 1.2146999999999999e-05, "loss": 0.1682, "step": 4052 }, { "epoch": 22.771830985915493, "grad_norm": 0.7676827311515808, "learning_rate": 1.215e-05, "loss": 0.1211, "step": 4053 }, { "epoch": 22.777464788732395, "grad_norm": 0.7708531022071838, "learning_rate": 1.2153e-05, "loss": 0.1591, "step": 4054 }, { "epoch": 22.783098591549297, "grad_norm": 0.6245898604393005, "learning_rate": 
1.2156e-05, "loss": 0.1326, "step": 4055 }, { "epoch": 22.788732394366196, "grad_norm": 0.6566709876060486, "learning_rate": 1.2159e-05, "loss": 0.183, "step": 4056 }, { "epoch": 22.7943661971831, "grad_norm": 0.6779392957687378, "learning_rate": 1.2162e-05, "loss": 0.1509, "step": 4057 }, { "epoch": 22.8, "grad_norm": 0.5927857756614685, "learning_rate": 1.2165000000000001e-05, "loss": 0.0982, "step": 4058 }, { "epoch": 22.805633802816903, "grad_norm": 0.6589165925979614, "learning_rate": 1.2168000000000001e-05, "loss": 0.1348, "step": 4059 }, { "epoch": 22.8112676056338, "grad_norm": 0.5494717359542847, "learning_rate": 1.2171000000000001e-05, "loss": 0.1067, "step": 4060 }, { "epoch": 22.816901408450704, "grad_norm": 0.6772322654724121, "learning_rate": 1.2174e-05, "loss": 0.1526, "step": 4061 }, { "epoch": 22.822535211267606, "grad_norm": 0.6951221227645874, "learning_rate": 1.2177e-05, "loss": 0.119, "step": 4062 }, { "epoch": 22.82816901408451, "grad_norm": 0.48261964321136475, "learning_rate": 1.2180000000000002e-05, "loss": 0.1005, "step": 4063 }, { "epoch": 22.833802816901407, "grad_norm": 0.7543203830718994, "learning_rate": 1.2183e-05, "loss": 0.0871, "step": 4064 }, { "epoch": 22.83943661971831, "grad_norm": 0.512039840221405, "learning_rate": 1.2186e-05, "loss": 0.1023, "step": 4065 }, { "epoch": 22.845070422535212, "grad_norm": 0.7598488330841064, "learning_rate": 1.2189e-05, "loss": 0.1103, "step": 4066 }, { "epoch": 22.850704225352114, "grad_norm": 0.6169450283050537, "learning_rate": 1.2192e-05, "loss": 0.0553, "step": 4067 }, { "epoch": 22.856338028169013, "grad_norm": 0.5265209078788757, "learning_rate": 1.2195e-05, "loss": 0.0747, "step": 4068 }, { "epoch": 22.861971830985915, "grad_norm": 0.5728177428245544, "learning_rate": 1.2198e-05, "loss": 0.0774, "step": 4069 }, { "epoch": 22.867605633802818, "grad_norm": 0.5584242343902588, "learning_rate": 1.2201e-05, "loss": 0.0777, "step": 4070 }, { "epoch": 22.87323943661972, "grad_norm": 
0.5636805295944214, "learning_rate": 1.2204e-05, "loss": 0.1063, "step": 4071 }, { "epoch": 22.87887323943662, "grad_norm": 0.733416736125946, "learning_rate": 1.2207e-05, "loss": 0.0809, "step": 4072 }, { "epoch": 22.88450704225352, "grad_norm": 0.629127025604248, "learning_rate": 1.221e-05, "loss": 0.0691, "step": 4073 }, { "epoch": 22.890140845070423, "grad_norm": 0.6852758526802063, "learning_rate": 1.2213000000000001e-05, "loss": 0.0986, "step": 4074 }, { "epoch": 22.895774647887325, "grad_norm": 0.43913179636001587, "learning_rate": 1.2216000000000001e-05, "loss": 0.0509, "step": 4075 }, { "epoch": 22.901408450704224, "grad_norm": 0.5295721888542175, "learning_rate": 1.2219e-05, "loss": 0.0615, "step": 4076 }, { "epoch": 22.907042253521126, "grad_norm": 0.5901570320129395, "learning_rate": 1.2222e-05, "loss": 0.0384, "step": 4077 }, { "epoch": 22.91267605633803, "grad_norm": 0.5716909766197205, "learning_rate": 1.2224999999999999e-05, "loss": 0.0891, "step": 4078 }, { "epoch": 22.91830985915493, "grad_norm": 0.6434781551361084, "learning_rate": 1.2228e-05, "loss": 0.0834, "step": 4079 }, { "epoch": 22.92394366197183, "grad_norm": 0.411347359418869, "learning_rate": 1.2231e-05, "loss": 0.0395, "step": 4080 }, { "epoch": 22.929577464788732, "grad_norm": 0.8809104561805725, "learning_rate": 1.2234e-05, "loss": 0.0806, "step": 4081 }, { "epoch": 22.935211267605634, "grad_norm": 1.1916978359222412, "learning_rate": 1.2237e-05, "loss": 0.0703, "step": 4082 }, { "epoch": 22.940845070422537, "grad_norm": 0.4498680531978607, "learning_rate": 1.224e-05, "loss": 0.0472, "step": 4083 }, { "epoch": 22.946478873239435, "grad_norm": 0.6316377520561218, "learning_rate": 1.2243000000000001e-05, "loss": 0.0549, "step": 4084 }, { "epoch": 22.952112676056338, "grad_norm": 0.5239761471748352, "learning_rate": 1.2246000000000001e-05, "loss": 0.0642, "step": 4085 }, { "epoch": 22.95774647887324, "grad_norm": 0.7123968005180359, "learning_rate": 1.2249e-05, "loss": 0.0417, "step": 
4086 }, { "epoch": 22.963380281690142, "grad_norm": 0.5085312128067017, "learning_rate": 1.2252e-05, "loss": 0.0999, "step": 4087 }, { "epoch": 22.96901408450704, "grad_norm": 0.7167842388153076, "learning_rate": 1.2254999999999999e-05, "loss": 0.1017, "step": 4088 }, { "epoch": 22.974647887323943, "grad_norm": 0.5447570085525513, "learning_rate": 1.2258e-05, "loss": 0.0323, "step": 4089 }, { "epoch": 22.980281690140846, "grad_norm": 0.5587484836578369, "learning_rate": 1.2261e-05, "loss": 0.0571, "step": 4090 }, { "epoch": 22.985915492957748, "grad_norm": 0.5065699815750122, "learning_rate": 1.2264e-05, "loss": 0.0377, "step": 4091 }, { "epoch": 22.991549295774647, "grad_norm": 0.5544353723526001, "learning_rate": 1.2267e-05, "loss": 0.0267, "step": 4092 }, { "epoch": 22.99718309859155, "grad_norm": 0.8918290138244629, "learning_rate": 1.227e-05, "loss": 0.1416, "step": 4093 }, { "epoch": 23.0, "grad_norm": 0.3395528197288513, "learning_rate": 1.2273000000000001e-05, "loss": 0.0235, "step": 4094 }, { "epoch": 23.005633802816902, "grad_norm": 0.9658334255218506, "learning_rate": 1.2276e-05, "loss": 0.2249, "step": 4095 }, { "epoch": 23.011267605633805, "grad_norm": 1.1492165327072144, "learning_rate": 1.2279e-05, "loss": 0.1731, "step": 4096 }, { "epoch": 23.016901408450703, "grad_norm": 0.7275222539901733, "learning_rate": 1.2282e-05, "loss": 0.1664, "step": 4097 }, { "epoch": 23.022535211267606, "grad_norm": 0.5715535879135132, "learning_rate": 1.2285e-05, "loss": 0.2106, "step": 4098 }, { "epoch": 23.028169014084508, "grad_norm": 0.6236465573310852, "learning_rate": 1.2288000000000002e-05, "loss": 0.1553, "step": 4099 }, { "epoch": 23.03380281690141, "grad_norm": 0.6167938113212585, "learning_rate": 1.2291000000000001e-05, "loss": 0.1221, "step": 4100 }, { "epoch": 23.03943661971831, "grad_norm": 0.5418891310691833, "learning_rate": 1.2294e-05, "loss": 0.1148, "step": 4101 }, { "epoch": 23.04507042253521, "grad_norm": 0.8033453226089478, "learning_rate": 
1.2297e-05, "loss": 0.1263, "step": 4102 }, { "epoch": 23.050704225352113, "grad_norm": 0.6530650854110718, "learning_rate": 1.2299999999999999e-05, "loss": 0.1237, "step": 4103 }, { "epoch": 23.056338028169016, "grad_norm": 0.505116879940033, "learning_rate": 1.2303e-05, "loss": 0.0806, "step": 4104 }, { "epoch": 23.061971830985915, "grad_norm": 0.7444429993629456, "learning_rate": 1.2306e-05, "loss": 0.169, "step": 4105 }, { "epoch": 23.067605633802817, "grad_norm": 0.6004387736320496, "learning_rate": 1.2309e-05, "loss": 0.0898, "step": 4106 }, { "epoch": 23.07323943661972, "grad_norm": 0.6814025640487671, "learning_rate": 1.2312e-05, "loss": 0.1015, "step": 4107 }, { "epoch": 23.07887323943662, "grad_norm": 0.7561472058296204, "learning_rate": 1.2315e-05, "loss": 0.0932, "step": 4108 }, { "epoch": 23.08450704225352, "grad_norm": 0.555224597454071, "learning_rate": 1.2318000000000001e-05, "loss": 0.0848, "step": 4109 }, { "epoch": 23.090140845070422, "grad_norm": 0.5732291340827942, "learning_rate": 1.2321000000000001e-05, "loss": 0.0865, "step": 4110 }, { "epoch": 23.095774647887325, "grad_norm": 0.5083555579185486, "learning_rate": 1.2324000000000001e-05, "loss": 0.0798, "step": 4111 }, { "epoch": 23.101408450704227, "grad_norm": 0.6031458973884583, "learning_rate": 1.2327e-05, "loss": 0.0949, "step": 4112 }, { "epoch": 23.107042253521126, "grad_norm": 0.7420357465744019, "learning_rate": 1.2329999999999999e-05, "loss": 0.1085, "step": 4113 }, { "epoch": 23.112676056338028, "grad_norm": 0.7225790023803711, "learning_rate": 1.2333e-05, "loss": 0.073, "step": 4114 }, { "epoch": 23.11830985915493, "grad_norm": 1.9921703338623047, "learning_rate": 1.2336e-05, "loss": 0.1105, "step": 4115 }, { "epoch": 23.123943661971833, "grad_norm": 0.7226966023445129, "learning_rate": 1.2339e-05, "loss": 0.0875, "step": 4116 }, { "epoch": 23.12957746478873, "grad_norm": 0.5651920437812805, "learning_rate": 1.2342e-05, "loss": 0.0754, "step": 4117 }, { "epoch": 
23.135211267605634, "grad_norm": 0.8205820322036743, "learning_rate": 1.2345e-05, "loss": 0.1029, "step": 4118 }, { "epoch": 23.140845070422536, "grad_norm": 0.5022649765014648, "learning_rate": 1.2348000000000001e-05, "loss": 0.0532, "step": 4119 }, { "epoch": 23.146478873239438, "grad_norm": 0.5159724950790405, "learning_rate": 1.2351e-05, "loss": 0.074, "step": 4120 }, { "epoch": 23.152112676056337, "grad_norm": 0.5425541400909424, "learning_rate": 1.2354e-05, "loss": 0.0719, "step": 4121 }, { "epoch": 23.15774647887324, "grad_norm": 0.4676657021045685, "learning_rate": 1.2357e-05, "loss": 0.0663, "step": 4122 }, { "epoch": 23.16338028169014, "grad_norm": 0.4086676836013794, "learning_rate": 1.236e-05, "loss": 0.0478, "step": 4123 }, { "epoch": 23.169014084507044, "grad_norm": 0.5624715089797974, "learning_rate": 1.2363000000000002e-05, "loss": 0.1038, "step": 4124 }, { "epoch": 23.174647887323943, "grad_norm": 0.6333167552947998, "learning_rate": 1.2366e-05, "loss": 0.0746, "step": 4125 }, { "epoch": 23.180281690140845, "grad_norm": 0.6504220366477966, "learning_rate": 1.2369e-05, "loss": 0.0431, "step": 4126 }, { "epoch": 23.185915492957747, "grad_norm": 0.39182570576667786, "learning_rate": 1.2372e-05, "loss": 0.0847, "step": 4127 }, { "epoch": 23.19154929577465, "grad_norm": 0.43021780252456665, "learning_rate": 1.2375e-05, "loss": 0.046, "step": 4128 }, { "epoch": 23.197183098591548, "grad_norm": 0.6490305066108704, "learning_rate": 1.2378e-05, "loss": 0.0358, "step": 4129 }, { "epoch": 23.20281690140845, "grad_norm": 1.1020830869674683, "learning_rate": 1.2381e-05, "loss": 0.0762, "step": 4130 }, { "epoch": 23.208450704225353, "grad_norm": 0.6040196418762207, "learning_rate": 1.2384e-05, "loss": 0.0478, "step": 4131 }, { "epoch": 23.214084507042255, "grad_norm": 0.6052760481834412, "learning_rate": 1.2387e-05, "loss": 0.0536, "step": 4132 }, { "epoch": 23.219718309859154, "grad_norm": 0.5066599249839783, "learning_rate": 1.239e-05, "loss": 0.0568, "step": 
4133 }, { "epoch": 23.225352112676056, "grad_norm": 1.1624547243118286, "learning_rate": 1.2393000000000001e-05, "loss": 0.0605, "step": 4134 }, { "epoch": 23.23098591549296, "grad_norm": 0.4561499357223511, "learning_rate": 1.2396000000000001e-05, "loss": 0.0568, "step": 4135 }, { "epoch": 23.23661971830986, "grad_norm": 0.5617054104804993, "learning_rate": 1.2399000000000001e-05, "loss": 0.0714, "step": 4136 }, { "epoch": 23.24225352112676, "grad_norm": 0.6150948405265808, "learning_rate": 1.2402e-05, "loss": 0.0929, "step": 4137 }, { "epoch": 23.24788732394366, "grad_norm": 0.6507525444030762, "learning_rate": 1.2404999999999999e-05, "loss": 0.0265, "step": 4138 }, { "epoch": 23.253521126760564, "grad_norm": 0.9486775398254395, "learning_rate": 1.2408e-05, "loss": 0.1635, "step": 4139 }, { "epoch": 23.259154929577466, "grad_norm": 0.6042888760566711, "learning_rate": 1.2411e-05, "loss": 0.1452, "step": 4140 }, { "epoch": 23.264788732394365, "grad_norm": 0.9253517985343933, "learning_rate": 1.2414e-05, "loss": 0.1512, "step": 4141 }, { "epoch": 23.270422535211267, "grad_norm": 0.7138837575912476, "learning_rate": 1.2417e-05, "loss": 0.1853, "step": 4142 }, { "epoch": 23.27605633802817, "grad_norm": 0.5281553268432617, "learning_rate": 1.242e-05, "loss": 0.1459, "step": 4143 }, { "epoch": 23.281690140845072, "grad_norm": 0.8801809549331665, "learning_rate": 1.2423000000000001e-05, "loss": 0.1573, "step": 4144 }, { "epoch": 23.28732394366197, "grad_norm": 0.623673677444458, "learning_rate": 1.2426000000000001e-05, "loss": 0.1527, "step": 4145 }, { "epoch": 23.292957746478873, "grad_norm": 0.7898014187812805, "learning_rate": 1.2429e-05, "loss": 0.1174, "step": 4146 }, { "epoch": 23.298591549295775, "grad_norm": 1.1000264883041382, "learning_rate": 1.2432e-05, "loss": 0.1468, "step": 4147 }, { "epoch": 23.304225352112677, "grad_norm": 0.6301991939544678, "learning_rate": 1.2435e-05, "loss": 0.0883, "step": 4148 }, { "epoch": 23.309859154929576, "grad_norm": 
0.6723244190216064, "learning_rate": 1.2438000000000002e-05, "loss": 0.1397, "step": 4149 }, { "epoch": 23.31549295774648, "grad_norm": 0.8874301910400391, "learning_rate": 1.2441e-05, "loss": 0.1111, "step": 4150 }, { "epoch": 23.32112676056338, "grad_norm": 0.8632453083992004, "learning_rate": 1.2444e-05, "loss": 0.094, "step": 4151 }, { "epoch": 23.326760563380283, "grad_norm": 0.5346524715423584, "learning_rate": 1.2447e-05, "loss": 0.1093, "step": 4152 }, { "epoch": 23.33239436619718, "grad_norm": 0.5492019653320312, "learning_rate": 1.245e-05, "loss": 0.1391, "step": 4153 }, { "epoch": 23.338028169014084, "grad_norm": 0.6774193048477173, "learning_rate": 1.2453000000000001e-05, "loss": 0.073, "step": 4154 }, { "epoch": 23.343661971830986, "grad_norm": 0.6376898884773254, "learning_rate": 1.2456e-05, "loss": 0.0908, "step": 4155 }, { "epoch": 23.34929577464789, "grad_norm": 0.5743589401245117, "learning_rate": 1.2459e-05, "loss": 0.0885, "step": 4156 }, { "epoch": 23.354929577464787, "grad_norm": 0.8147374391555786, "learning_rate": 1.2462e-05, "loss": 0.1088, "step": 4157 }, { "epoch": 23.36056338028169, "grad_norm": 0.443757563829422, "learning_rate": 1.2465e-05, "loss": 0.0503, "step": 4158 }, { "epoch": 23.366197183098592, "grad_norm": 0.7925986051559448, "learning_rate": 1.2468000000000002e-05, "loss": 0.1087, "step": 4159 }, { "epoch": 23.371830985915494, "grad_norm": 0.5975234508514404, "learning_rate": 1.2471000000000001e-05, "loss": 0.0763, "step": 4160 }, { "epoch": 23.377464788732393, "grad_norm": 0.43297088146209717, "learning_rate": 1.2474000000000001e-05, "loss": 0.0415, "step": 4161 }, { "epoch": 23.383098591549295, "grad_norm": 0.5459612011909485, "learning_rate": 1.2477e-05, "loss": 0.0587, "step": 4162 }, { "epoch": 23.388732394366198, "grad_norm": 0.947593629360199, "learning_rate": 1.2479999999999999e-05, "loss": 0.1028, "step": 4163 }, { "epoch": 23.3943661971831, "grad_norm": 0.5713532567024231, "learning_rate": 1.2483e-05, "loss": 
0.0682, "step": 4164 }, { "epoch": 23.4, "grad_norm": 0.5250890254974365, "learning_rate": 1.2486e-05, "loss": 0.101, "step": 4165 }, { "epoch": 23.4056338028169, "grad_norm": 0.5824111104011536, "learning_rate": 1.2489e-05, "loss": 0.0599, "step": 4166 }, { "epoch": 23.411267605633803, "grad_norm": 0.7523699402809143, "learning_rate": 1.2492e-05, "loss": 0.0449, "step": 4167 }, { "epoch": 23.416901408450705, "grad_norm": 0.5430312156677246, "learning_rate": 1.2495e-05, "loss": 0.0378, "step": 4168 }, { "epoch": 23.422535211267604, "grad_norm": 0.49639061093330383, "learning_rate": 1.2498000000000001e-05, "loss": 0.0468, "step": 4169 }, { "epoch": 23.428169014084506, "grad_norm": 0.6305939555168152, "learning_rate": 1.2501000000000001e-05, "loss": 0.055, "step": 4170 }, { "epoch": 23.43380281690141, "grad_norm": 0.4951257109642029, "learning_rate": 1.2504000000000001e-05, "loss": 0.0554, "step": 4171 }, { "epoch": 23.43943661971831, "grad_norm": 1.2714241743087769, "learning_rate": 1.2507e-05, "loss": 0.108, "step": 4172 }, { "epoch": 23.44507042253521, "grad_norm": 0.6206589341163635, "learning_rate": 1.251e-05, "loss": 0.0786, "step": 4173 }, { "epoch": 23.450704225352112, "grad_norm": 0.6703273057937622, "learning_rate": 1.2513e-05, "loss": 0.0975, "step": 4174 }, { "epoch": 23.456338028169014, "grad_norm": 0.5862429738044739, "learning_rate": 1.2516e-05, "loss": 0.0472, "step": 4175 }, { "epoch": 23.461971830985917, "grad_norm": 0.5780446529388428, "learning_rate": 1.2519e-05, "loss": 0.038, "step": 4176 }, { "epoch": 23.467605633802815, "grad_norm": 0.5202690958976746, "learning_rate": 1.2522e-05, "loss": 0.0343, "step": 4177 }, { "epoch": 23.473239436619718, "grad_norm": 0.8328983187675476, "learning_rate": 1.2525e-05, "loss": 0.0559, "step": 4178 }, { "epoch": 23.47887323943662, "grad_norm": 0.507195234298706, "learning_rate": 1.2528000000000001e-05, "loss": 0.0275, "step": 4179 }, { "epoch": 23.484507042253522, "grad_norm": 0.8309234380722046, 
"learning_rate": 1.2531e-05, "loss": 0.0457, "step": 4180 }, { "epoch": 23.49014084507042, "grad_norm": 0.7990297675132751, "learning_rate": 1.2534e-05, "loss": 0.0633, "step": 4181 }, { "epoch": 23.495774647887323, "grad_norm": 0.5626228451728821, "learning_rate": 1.2537e-05, "loss": 0.0304, "step": 4182 }, { "epoch": 23.501408450704226, "grad_norm": 1.3554123640060425, "learning_rate": 1.254e-05, "loss": 0.263, "step": 4183 }, { "epoch": 23.507042253521128, "grad_norm": 1.351234793663025, "learning_rate": 1.2543000000000002e-05, "loss": 0.1812, "step": 4184 }, { "epoch": 23.512676056338027, "grad_norm": 3.1715219020843506, "learning_rate": 1.2546000000000002e-05, "loss": 0.1397, "step": 4185 }, { "epoch": 23.51830985915493, "grad_norm": 0.639583170413971, "learning_rate": 1.2549000000000001e-05, "loss": 0.1204, "step": 4186 }, { "epoch": 23.52394366197183, "grad_norm": 0.6878611445426941, "learning_rate": 1.2552e-05, "loss": 0.174, "step": 4187 }, { "epoch": 23.529577464788733, "grad_norm": 0.7327574491500854, "learning_rate": 1.2555e-05, "loss": 0.1507, "step": 4188 }, { "epoch": 23.535211267605632, "grad_norm": 0.7077688574790955, "learning_rate": 1.2558e-05, "loss": 0.118, "step": 4189 }, { "epoch": 23.540845070422534, "grad_norm": 0.9437618255615234, "learning_rate": 1.2561e-05, "loss": 0.2188, "step": 4190 }, { "epoch": 23.546478873239437, "grad_norm": 0.8368025422096252, "learning_rate": 1.2564e-05, "loss": 0.0985, "step": 4191 }, { "epoch": 23.55211267605634, "grad_norm": 0.7891712784767151, "learning_rate": 1.2567e-05, "loss": 0.1178, "step": 4192 }, { "epoch": 23.557746478873238, "grad_norm": 0.8342640399932861, "learning_rate": 1.257e-05, "loss": 0.0925, "step": 4193 }, { "epoch": 23.56338028169014, "grad_norm": 0.9886460900306702, "learning_rate": 1.2573e-05, "loss": 0.0883, "step": 4194 }, { "epoch": 23.569014084507042, "grad_norm": 0.9591385722160339, "learning_rate": 1.2576000000000001e-05, "loss": 0.1424, "step": 4195 }, { "epoch": 
23.574647887323945, "grad_norm": 0.6826116442680359, "learning_rate": 1.2579000000000001e-05, "loss": 0.1154, "step": 4196 }, { "epoch": 23.580281690140843, "grad_norm": 0.5612527132034302, "learning_rate": 1.2582e-05, "loss": 0.0935, "step": 4197 }, { "epoch": 23.585915492957746, "grad_norm": 0.5967805981636047, "learning_rate": 1.2585e-05, "loss": 0.0786, "step": 4198 }, { "epoch": 23.591549295774648, "grad_norm": 0.7391576766967773, "learning_rate": 1.2587999999999999e-05, "loss": 0.1014, "step": 4199 }, { "epoch": 23.59718309859155, "grad_norm": 0.7565858364105225, "learning_rate": 1.2591e-05, "loss": 0.0868, "step": 4200 }, { "epoch": 23.60281690140845, "grad_norm": 0.6568203568458557, "learning_rate": 1.2594e-05, "loss": 0.0549, "step": 4201 }, { "epoch": 23.60845070422535, "grad_norm": 0.40323689579963684, "learning_rate": 1.2597e-05, "loss": 0.0593, "step": 4202 }, { "epoch": 23.614084507042254, "grad_norm": 0.678924024105072, "learning_rate": 1.26e-05, "loss": 0.0767, "step": 4203 }, { "epoch": 23.619718309859156, "grad_norm": 1.4756662845611572, "learning_rate": 1.2603e-05, "loss": 0.0807, "step": 4204 }, { "epoch": 23.625352112676055, "grad_norm": 0.8821166157722473, "learning_rate": 1.2606000000000001e-05, "loss": 0.1218, "step": 4205 }, { "epoch": 23.630985915492957, "grad_norm": 0.6707702279090881, "learning_rate": 1.2609e-05, "loss": 0.0681, "step": 4206 }, { "epoch": 23.63661971830986, "grad_norm": 1.0192433595657349, "learning_rate": 1.2612e-05, "loss": 0.0588, "step": 4207 }, { "epoch": 23.64225352112676, "grad_norm": 0.5727977156639099, "learning_rate": 1.2615e-05, "loss": 0.071, "step": 4208 }, { "epoch": 23.647887323943664, "grad_norm": 0.6100397109985352, "learning_rate": 1.2618e-05, "loss": 0.064, "step": 4209 }, { "epoch": 23.653521126760563, "grad_norm": 0.5034952759742737, "learning_rate": 1.2621000000000002e-05, "loss": 0.0427, "step": 4210 }, { "epoch": 23.659154929577465, "grad_norm": 0.748559296131134, "learning_rate": 1.2624e-05, 
"loss": 0.0353, "step": 4211 }, { "epoch": 23.664788732394367, "grad_norm": 0.5814128518104553, "learning_rate": 1.2627e-05, "loss": 0.072, "step": 4212 }, { "epoch": 23.670422535211266, "grad_norm": 0.6582777500152588, "learning_rate": 1.263e-05, "loss": 0.078, "step": 4213 }, { "epoch": 23.676056338028168, "grad_norm": 0.4307142198085785, "learning_rate": 1.2633e-05, "loss": 0.0356, "step": 4214 }, { "epoch": 23.68169014084507, "grad_norm": 0.9973230361938477, "learning_rate": 1.2636e-05, "loss": 0.1175, "step": 4215 }, { "epoch": 23.687323943661973, "grad_norm": 0.48579785227775574, "learning_rate": 1.2639e-05, "loss": 0.0555, "step": 4216 }, { "epoch": 23.692957746478875, "grad_norm": 0.6463684439659119, "learning_rate": 1.2642e-05, "loss": 0.0493, "step": 4217 }, { "epoch": 23.698591549295774, "grad_norm": 0.6669109463691711, "learning_rate": 1.2645e-05, "loss": 0.0552, "step": 4218 }, { "epoch": 23.704225352112676, "grad_norm": 0.3745613396167755, "learning_rate": 1.2648e-05, "loss": 0.0576, "step": 4219 }, { "epoch": 23.70985915492958, "grad_norm": 0.4938630163669586, "learning_rate": 1.2651000000000001e-05, "loss": 0.036, "step": 4220 }, { "epoch": 23.71549295774648, "grad_norm": 0.529007077217102, "learning_rate": 1.2654000000000001e-05, "loss": 0.0335, "step": 4221 }, { "epoch": 23.72112676056338, "grad_norm": 0.5144939422607422, "learning_rate": 1.2657000000000001e-05, "loss": 0.0751, "step": 4222 }, { "epoch": 23.72676056338028, "grad_norm": 0.7330500483512878, "learning_rate": 1.2659999999999999e-05, "loss": 0.0704, "step": 4223 }, { "epoch": 23.732394366197184, "grad_norm": 0.38269367814064026, "learning_rate": 1.2662999999999999e-05, "loss": 0.0318, "step": 4224 }, { "epoch": 23.738028169014086, "grad_norm": 0.6206733584403992, "learning_rate": 1.2666e-05, "loss": 0.0283, "step": 4225 }, { "epoch": 23.743661971830985, "grad_norm": 0.609607994556427, "learning_rate": 1.2669e-05, "loss": 0.0598, "step": 4226 }, { "epoch": 23.749295774647887, 
"grad_norm": 1.0881476402282715, "learning_rate": 1.2672e-05, "loss": 0.21, "step": 4227 }, { "epoch": 23.75492957746479, "grad_norm": 0.672999918460846, "learning_rate": 1.2675e-05, "loss": 0.1592, "step": 4228 }, { "epoch": 23.760563380281692, "grad_norm": 0.7813212275505066, "learning_rate": 1.2678e-05, "loss": 0.1715, "step": 4229 }, { "epoch": 23.76619718309859, "grad_norm": 0.7683329582214355, "learning_rate": 1.2681000000000001e-05, "loss": 0.1602, "step": 4230 }, { "epoch": 23.771830985915493, "grad_norm": 0.6045087575912476, "learning_rate": 1.2684000000000001e-05, "loss": 0.1133, "step": 4231 }, { "epoch": 23.777464788732395, "grad_norm": 0.6047942042350769, "learning_rate": 1.2687e-05, "loss": 0.1344, "step": 4232 }, { "epoch": 23.783098591549297, "grad_norm": 0.6312155723571777, "learning_rate": 1.269e-05, "loss": 0.1668, "step": 4233 }, { "epoch": 23.788732394366196, "grad_norm": 0.9165892004966736, "learning_rate": 1.2693e-05, "loss": 0.1897, "step": 4234 }, { "epoch": 23.7943661971831, "grad_norm": 0.6850040555000305, "learning_rate": 1.2696000000000002e-05, "loss": 0.1119, "step": 4235 }, { "epoch": 23.8, "grad_norm": 0.5841269493103027, "learning_rate": 1.2699e-05, "loss": 0.0896, "step": 4236 }, { "epoch": 23.805633802816903, "grad_norm": 1.0083311796188354, "learning_rate": 1.2702e-05, "loss": 0.1384, "step": 4237 }, { "epoch": 23.8112676056338, "grad_norm": 0.6973533630371094, "learning_rate": 1.2705e-05, "loss": 0.1212, "step": 4238 }, { "epoch": 23.816901408450704, "grad_norm": 0.76484614610672, "learning_rate": 1.2708e-05, "loss": 0.1586, "step": 4239 }, { "epoch": 23.822535211267606, "grad_norm": 0.8694419264793396, "learning_rate": 1.2711e-05, "loss": 0.1533, "step": 4240 }, { "epoch": 23.82816901408451, "grad_norm": 0.7064487934112549, "learning_rate": 1.2714e-05, "loss": 0.1218, "step": 4241 }, { "epoch": 23.833802816901407, "grad_norm": 0.8663605451583862, "learning_rate": 1.2717e-05, "loss": 0.1637, "step": 4242 }, { "epoch": 
23.83943661971831, "grad_norm": 0.5701957941055298, "learning_rate": 1.272e-05, "loss": 0.0696, "step": 4243 }, { "epoch": 23.845070422535212, "grad_norm": 0.5791113972663879, "learning_rate": 1.2723e-05, "loss": 0.0714, "step": 4244 }, { "epoch": 23.850704225352114, "grad_norm": 0.6839434504508972, "learning_rate": 1.2726000000000001e-05, "loss": 0.0716, "step": 4245 }, { "epoch": 23.856338028169013, "grad_norm": 0.7542939782142639, "learning_rate": 1.2729000000000001e-05, "loss": 0.0984, "step": 4246 }, { "epoch": 23.861971830985915, "grad_norm": 0.4452124834060669, "learning_rate": 1.2732000000000001e-05, "loss": 0.0533, "step": 4247 }, { "epoch": 23.867605633802818, "grad_norm": 0.8278579711914062, "learning_rate": 1.2735e-05, "loss": 0.0976, "step": 4248 }, { "epoch": 23.87323943661972, "grad_norm": 0.677161455154419, "learning_rate": 1.2737999999999999e-05, "loss": 0.0951, "step": 4249 }, { "epoch": 23.87887323943662, "grad_norm": 0.4953368306159973, "learning_rate": 1.2741e-05, "loss": 0.0476, "step": 4250 }, { "epoch": 23.88450704225352, "grad_norm": 0.9795747399330139, "learning_rate": 1.2744e-05, "loss": 0.0691, "step": 4251 }, { "epoch": 23.890140845070423, "grad_norm": 0.5633463263511658, "learning_rate": 1.2747e-05, "loss": 0.0523, "step": 4252 }, { "epoch": 23.895774647887325, "grad_norm": 0.6801254153251648, "learning_rate": 1.275e-05, "loss": 0.0682, "step": 4253 }, { "epoch": 23.901408450704224, "grad_norm": 0.43914005160331726, "learning_rate": 1.2753e-05, "loss": 0.0528, "step": 4254 }, { "epoch": 23.907042253521126, "grad_norm": 0.627133309841156, "learning_rate": 1.2756000000000001e-05, "loss": 0.0647, "step": 4255 }, { "epoch": 23.91267605633803, "grad_norm": 0.8258602023124695, "learning_rate": 1.2759000000000001e-05, "loss": 0.0975, "step": 4256 }, { "epoch": 23.91830985915493, "grad_norm": 0.36823856830596924, "learning_rate": 1.2762e-05, "loss": 0.0236, "step": 4257 }, { "epoch": 23.92394366197183, "grad_norm": 1.3950039148330688, 
"learning_rate": 1.2765e-05, "loss": 0.1318, "step": 4258 }, { "epoch": 23.929577464788732, "grad_norm": 0.6775383353233337, "learning_rate": 1.2768e-05, "loss": 0.0635, "step": 4259 }, { "epoch": 23.935211267605634, "grad_norm": 0.4381648898124695, "learning_rate": 1.2771e-05, "loss": 0.0324, "step": 4260 }, { "epoch": 23.940845070422537, "grad_norm": 0.5386395454406738, "learning_rate": 1.2774e-05, "loss": 0.07, "step": 4261 }, { "epoch": 23.946478873239435, "grad_norm": 0.535469651222229, "learning_rate": 1.2777e-05, "loss": 0.038, "step": 4262 }, { "epoch": 23.952112676056338, "grad_norm": 1.0519461631774902, "learning_rate": 1.278e-05, "loss": 0.1245, "step": 4263 }, { "epoch": 23.95774647887324, "grad_norm": 0.5301055908203125, "learning_rate": 1.2783e-05, "loss": 0.0393, "step": 4264 }, { "epoch": 23.963380281690142, "grad_norm": 4.256547451019287, "learning_rate": 1.2786000000000001e-05, "loss": 0.123, "step": 4265 }, { "epoch": 23.96901408450704, "grad_norm": 0.6312299370765686, "learning_rate": 1.2789e-05, "loss": 0.0478, "step": 4266 }, { "epoch": 23.974647887323943, "grad_norm": 0.6121985912322998, "learning_rate": 1.2792e-05, "loss": 0.0326, "step": 4267 }, { "epoch": 23.980281690140846, "grad_norm": 0.9644697308540344, "learning_rate": 1.2795e-05, "loss": 0.0663, "step": 4268 }, { "epoch": 23.985915492957748, "grad_norm": 0.3976660966873169, "learning_rate": 1.2798e-05, "loss": 0.0208, "step": 4269 }, { "epoch": 23.991549295774647, "grad_norm": 0.7569806575775146, "learning_rate": 1.2801000000000002e-05, "loss": 0.0635, "step": 4270 }, { "epoch": 23.99718309859155, "grad_norm": 0.9420872926712036, "learning_rate": 1.2804000000000001e-05, "loss": 0.1382, "step": 4271 }, { "epoch": 24.0, "grad_norm": 0.9621819257736206, "learning_rate": 1.2807000000000001e-05, "loss": 0.04, "step": 4272 }, { "epoch": 24.005633802816902, "grad_norm": 1.1635017395019531, "learning_rate": 1.281e-05, "loss": 0.2842, "step": 4273 }, { "epoch": 24.011267605633805, 
"grad_norm": 0.7054412364959717, "learning_rate": 1.2812999999999999e-05, "loss": 0.1642, "step": 4274 }, { "epoch": 24.016901408450703, "grad_norm": 0.7256177067756653, "learning_rate": 1.2816e-05, "loss": 0.1483, "step": 4275 }, { "epoch": 24.022535211267606, "grad_norm": 0.8521655797958374, "learning_rate": 1.2819e-05, "loss": 0.1702, "step": 4276 }, { "epoch": 24.028169014084508, "grad_norm": 0.6305317878723145, "learning_rate": 1.2822e-05, "loss": 0.1479, "step": 4277 }, { "epoch": 24.03380281690141, "grad_norm": 0.6091189980506897, "learning_rate": 1.2825e-05, "loss": 0.1567, "step": 4278 }, { "epoch": 24.03943661971831, "grad_norm": 0.8497311472892761, "learning_rate": 1.2828e-05, "loss": 0.1506, "step": 4279 }, { "epoch": 24.04507042253521, "grad_norm": 0.9516878724098206, "learning_rate": 1.2831000000000001e-05, "loss": 0.153, "step": 4280 }, { "epoch": 24.050704225352113, "grad_norm": 0.6747698187828064, "learning_rate": 1.2834000000000001e-05, "loss": 0.1507, "step": 4281 }, { "epoch": 24.056338028169016, "grad_norm": 0.5229388475418091, "learning_rate": 1.2837000000000001e-05, "loss": 0.0793, "step": 4282 }, { "epoch": 24.061971830985915, "grad_norm": 0.6383851170539856, "learning_rate": 1.284e-05, "loss": 0.1594, "step": 4283 }, { "epoch": 24.067605633802817, "grad_norm": 0.7262201905250549, "learning_rate": 1.2843e-05, "loss": 0.0791, "step": 4284 }, { "epoch": 24.07323943661972, "grad_norm": 0.4323574900627136, "learning_rate": 1.2846e-05, "loss": 0.0632, "step": 4285 }, { "epoch": 24.07887323943662, "grad_norm": 0.6544243693351746, "learning_rate": 1.2849e-05, "loss": 0.1098, "step": 4286 }, { "epoch": 24.08450704225352, "grad_norm": 0.5314440131187439, "learning_rate": 1.2852e-05, "loss": 0.063, "step": 4287 }, { "epoch": 24.090140845070422, "grad_norm": 0.7520785331726074, "learning_rate": 1.2855e-05, "loss": 0.1004, "step": 4288 }, { "epoch": 24.095774647887325, "grad_norm": 0.580680787563324, "learning_rate": 1.2858e-05, "loss": 0.1182, "step": 
4289 }, { "epoch": 24.101408450704227, "grad_norm": 0.6592906713485718, "learning_rate": 1.2861000000000001e-05, "loss": 0.0802, "step": 4290 }, { "epoch": 24.107042253521126, "grad_norm": 0.6720675826072693, "learning_rate": 1.2864e-05, "loss": 0.0839, "step": 4291 }, { "epoch": 24.112676056338028, "grad_norm": 0.5227365493774414, "learning_rate": 1.2867e-05, "loss": 0.0522, "step": 4292 }, { "epoch": 24.11830985915493, "grad_norm": 0.888308048248291, "learning_rate": 1.287e-05, "loss": 0.0902, "step": 4293 }, { "epoch": 24.123943661971833, "grad_norm": 0.9249238967895508, "learning_rate": 1.2873e-05, "loss": 0.0598, "step": 4294 }, { "epoch": 24.12957746478873, "grad_norm": 0.5107976198196411, "learning_rate": 1.2876000000000002e-05, "loss": 0.0753, "step": 4295 }, { "epoch": 24.135211267605634, "grad_norm": 0.6980229616165161, "learning_rate": 1.2879000000000002e-05, "loss": 0.0616, "step": 4296 }, { "epoch": 24.140845070422536, "grad_norm": 0.5665367245674133, "learning_rate": 1.2882e-05, "loss": 0.0594, "step": 4297 }, { "epoch": 24.146478873239438, "grad_norm": 0.5090133547782898, "learning_rate": 1.2885e-05, "loss": 0.0567, "step": 4298 }, { "epoch": 24.152112676056337, "grad_norm": 0.5456671714782715, "learning_rate": 1.2888e-05, "loss": 0.0541, "step": 4299 }, { "epoch": 24.15774647887324, "grad_norm": 0.49437516927719116, "learning_rate": 1.2891e-05, "loss": 0.0417, "step": 4300 }, { "epoch": 24.16338028169014, "grad_norm": 0.6400147080421448, "learning_rate": 1.2894e-05, "loss": 0.0426, "step": 4301 }, { "epoch": 24.169014084507044, "grad_norm": 0.60114586353302, "learning_rate": 1.2897e-05, "loss": 0.075, "step": 4302 }, { "epoch": 24.174647887323943, "grad_norm": 0.8365102410316467, "learning_rate": 1.29e-05, "loss": 0.0479, "step": 4303 }, { "epoch": 24.180281690140845, "grad_norm": 0.5101485848426819, "learning_rate": 1.2903e-05, "loss": 0.0492, "step": 4304 }, { "epoch": 24.185915492957747, "grad_norm": 0.450923353433609, "learning_rate": 
1.2906000000000001e-05, "loss": 0.0313, "step": 4305 }, { "epoch": 24.19154929577465, "grad_norm": 0.5153322219848633, "learning_rate": 1.2909000000000001e-05, "loss": 0.0746, "step": 4306 }, { "epoch": 24.197183098591548, "grad_norm": 0.7458231449127197, "learning_rate": 1.2912000000000001e-05, "loss": 0.0743, "step": 4307 }, { "epoch": 24.20281690140845, "grad_norm": 0.522832453250885, "learning_rate": 1.2915000000000001e-05, "loss": 0.0825, "step": 4308 }, { "epoch": 24.208450704225353, "grad_norm": 1.4099639654159546, "learning_rate": 1.2917999999999999e-05, "loss": 0.0833, "step": 4309 }, { "epoch": 24.214084507042255, "grad_norm": 0.4805450439453125, "learning_rate": 1.2921e-05, "loss": 0.0485, "step": 4310 }, { "epoch": 24.219718309859154, "grad_norm": 0.5413424968719482, "learning_rate": 1.2924e-05, "loss": 0.0343, "step": 4311 }, { "epoch": 24.225352112676056, "grad_norm": 0.68044114112854, "learning_rate": 1.2927e-05, "loss": 0.0626, "step": 4312 }, { "epoch": 24.23098591549296, "grad_norm": 0.4332643151283264, "learning_rate": 1.293e-05, "loss": 0.0189, "step": 4313 }, { "epoch": 24.23661971830986, "grad_norm": 0.8923380970954895, "learning_rate": 1.2933e-05, "loss": 0.0445, "step": 4314 }, { "epoch": 24.24225352112676, "grad_norm": 0.3333089351654053, "learning_rate": 1.2936000000000001e-05, "loss": 0.0189, "step": 4315 }, { "epoch": 24.24788732394366, "grad_norm": 0.588916003704071, "learning_rate": 1.2939000000000001e-05, "loss": 0.0468, "step": 4316 }, { "epoch": 24.253521126760564, "grad_norm": 0.7638625502586365, "learning_rate": 1.2942e-05, "loss": 0.1528, "step": 4317 }, { "epoch": 24.259154929577466, "grad_norm": 0.76870197057724, "learning_rate": 1.2945e-05, "loss": 0.1425, "step": 4318 }, { "epoch": 24.264788732394365, "grad_norm": 0.7347944974899292, "learning_rate": 1.2948e-05, "loss": 0.1473, "step": 4319 }, { "epoch": 24.270422535211267, "grad_norm": 0.5876345634460449, "learning_rate": 1.2951e-05, "loss": 0.132, "step": 4320 }, { "epoch": 
24.27605633802817, "grad_norm": 0.6026130318641663, "learning_rate": 1.2954000000000002e-05, "loss": 0.1107, "step": 4321 }, { "epoch": 24.281690140845072, "grad_norm": 0.7429172992706299, "learning_rate": 1.2957e-05, "loss": 0.1818, "step": 4322 }, { "epoch": 24.28732394366197, "grad_norm": 0.6619174480438232, "learning_rate": 1.296e-05, "loss": 0.1216, "step": 4323 }, { "epoch": 24.292957746478873, "grad_norm": 0.7262908220291138, "learning_rate": 1.2963e-05, "loss": 0.136, "step": 4324 }, { "epoch": 24.298591549295775, "grad_norm": 0.690660297870636, "learning_rate": 1.2966e-05, "loss": 0.1332, "step": 4325 }, { "epoch": 24.304225352112677, "grad_norm": 0.6459077000617981, "learning_rate": 1.2969e-05, "loss": 0.0831, "step": 4326 }, { "epoch": 24.309859154929576, "grad_norm": 0.7009790539741516, "learning_rate": 1.2972e-05, "loss": 0.1238, "step": 4327 }, { "epoch": 24.31549295774648, "grad_norm": 0.5944879055023193, "learning_rate": 1.2975e-05, "loss": 0.0955, "step": 4328 }, { "epoch": 24.32112676056338, "grad_norm": 0.5653350949287415, "learning_rate": 1.2978e-05, "loss": 0.097, "step": 4329 }, { "epoch": 24.326760563380283, "grad_norm": 1.00380277633667, "learning_rate": 1.2981e-05, "loss": 0.1321, "step": 4330 }, { "epoch": 24.33239436619718, "grad_norm": 0.7059932351112366, "learning_rate": 1.2984000000000001e-05, "loss": 0.0653, "step": 4331 }, { "epoch": 24.338028169014084, "grad_norm": 0.53305983543396, "learning_rate": 1.2987000000000001e-05, "loss": 0.0773, "step": 4332 }, { "epoch": 24.343661971830986, "grad_norm": 0.648902952671051, "learning_rate": 1.2990000000000001e-05, "loss": 0.0659, "step": 4333 }, { "epoch": 24.34929577464789, "grad_norm": 0.7834515571594238, "learning_rate": 1.2992999999999999e-05, "loss": 0.1063, "step": 4334 }, { "epoch": 24.354929577464787, "grad_norm": 0.6119270324707031, "learning_rate": 1.2995999999999999e-05, "loss": 0.0671, "step": 4335 }, { "epoch": 24.36056338028169, "grad_norm": 0.567958652973175, "learning_rate": 
1.2999e-05, "loss": 0.087, "step": 4336 }, { "epoch": 24.366197183098592, "grad_norm": 0.6149867177009583, "learning_rate": 1.3002e-05, "loss": 0.0762, "step": 4337 }, { "epoch": 24.371830985915494, "grad_norm": 0.6790911555290222, "learning_rate": 1.3005e-05, "loss": 0.0735, "step": 4338 }, { "epoch": 24.377464788732393, "grad_norm": 0.5433534979820251, "learning_rate": 1.3008e-05, "loss": 0.0954, "step": 4339 }, { "epoch": 24.383098591549295, "grad_norm": 0.5855494141578674, "learning_rate": 1.3011e-05, "loss": 0.0729, "step": 4340 }, { "epoch": 24.388732394366198, "grad_norm": 0.5582989454269409, "learning_rate": 1.3014000000000001e-05, "loss": 0.0972, "step": 4341 }, { "epoch": 24.3943661971831, "grad_norm": 0.7045875191688538, "learning_rate": 1.3017000000000001e-05, "loss": 0.0777, "step": 4342 }, { "epoch": 24.4, "grad_norm": 0.6210392117500305, "learning_rate": 1.302e-05, "loss": 0.1096, "step": 4343 }, { "epoch": 24.4056338028169, "grad_norm": 0.6489035487174988, "learning_rate": 1.3023e-05, "loss": 0.0686, "step": 4344 }, { "epoch": 24.411267605633803, "grad_norm": 0.5350387096405029, "learning_rate": 1.3026e-05, "loss": 0.068, "step": 4345 }, { "epoch": 24.416901408450705, "grad_norm": 0.46874353289604187, "learning_rate": 1.3029e-05, "loss": 0.0459, "step": 4346 }, { "epoch": 24.422535211267604, "grad_norm": 0.5414627194404602, "learning_rate": 1.3032e-05, "loss": 0.0599, "step": 4347 }, { "epoch": 24.428169014084506, "grad_norm": 0.7227060198783875, "learning_rate": 1.3035e-05, "loss": 0.051, "step": 4348 }, { "epoch": 24.43380281690141, "grad_norm": 0.7680533528327942, "learning_rate": 1.3038e-05, "loss": 0.0975, "step": 4349 }, { "epoch": 24.43943661971831, "grad_norm": 0.6085177659988403, "learning_rate": 1.3041e-05, "loss": 0.0441, "step": 4350 }, { "epoch": 24.44507042253521, "grad_norm": 0.46126022934913635, "learning_rate": 1.3044e-05, "loss": 0.0341, "step": 4351 }, { "epoch": 24.450704225352112, "grad_norm": 0.8547120094299316, 
"learning_rate": 1.3047e-05, "loss": 0.0714, "step": 4352 }, { "epoch": 24.456338028169014, "grad_norm": 0.5048354864120483, "learning_rate": 1.305e-05, "loss": 0.0702, "step": 4353 }, { "epoch": 24.461971830985917, "grad_norm": 0.43624362349510193, "learning_rate": 1.3053e-05, "loss": 0.0281, "step": 4354 }, { "epoch": 24.467605633802815, "grad_norm": 1.0516419410705566, "learning_rate": 1.3056e-05, "loss": 0.0725, "step": 4355 }, { "epoch": 24.473239436619718, "grad_norm": 0.7529454231262207, "learning_rate": 1.3059000000000002e-05, "loss": 0.0798, "step": 4356 }, { "epoch": 24.47887323943662, "grad_norm": 0.5498228073120117, "learning_rate": 1.3062000000000001e-05, "loss": 0.0541, "step": 4357 }, { "epoch": 24.484507042253522, "grad_norm": 0.44744089245796204, "learning_rate": 1.3065000000000001e-05, "loss": 0.0292, "step": 4358 }, { "epoch": 24.49014084507042, "grad_norm": 1.380916953086853, "learning_rate": 1.3068e-05, "loss": 0.0809, "step": 4359 }, { "epoch": 24.495774647887323, "grad_norm": 0.7994491457939148, "learning_rate": 1.3070999999999999e-05, "loss": 0.0429, "step": 4360 }, { "epoch": 24.501408450704226, "grad_norm": 1.167894721031189, "learning_rate": 1.3074e-05, "loss": 0.2024, "step": 4361 }, { "epoch": 24.507042253521128, "grad_norm": 1.0000518560409546, "learning_rate": 1.3077e-05, "loss": 0.1557, "step": 4362 }, { "epoch": 24.512676056338027, "grad_norm": 0.8163649439811707, "learning_rate": 1.308e-05, "loss": 0.1469, "step": 4363 }, { "epoch": 24.51830985915493, "grad_norm": 0.716886043548584, "learning_rate": 1.3083e-05, "loss": 0.1346, "step": 4364 }, { "epoch": 24.52394366197183, "grad_norm": 0.8802850246429443, "learning_rate": 1.3086e-05, "loss": 0.1931, "step": 4365 }, { "epoch": 24.529577464788733, "grad_norm": 0.7565457224845886, "learning_rate": 1.3089000000000001e-05, "loss": 0.1205, "step": 4366 }, { "epoch": 24.535211267605632, "grad_norm": 0.6364262104034424, "learning_rate": 1.3092000000000001e-05, "loss": 0.0951, "step": 4367 
}, { "epoch": 24.540845070422534, "grad_norm": 0.654219388961792, "learning_rate": 1.3095e-05, "loss": 0.1656, "step": 4368 }, { "epoch": 24.546478873239437, "grad_norm": 0.48334330320358276, "learning_rate": 1.3098e-05, "loss": 0.0874, "step": 4369 }, { "epoch": 24.55211267605634, "grad_norm": 0.5781785845756531, "learning_rate": 1.3101e-05, "loss": 0.1109, "step": 4370 }, { "epoch": 24.557746478873238, "grad_norm": 0.6490851640701294, "learning_rate": 1.3104e-05, "loss": 0.1176, "step": 4371 }, { "epoch": 24.56338028169014, "grad_norm": 0.7927092909812927, "learning_rate": 1.3107e-05, "loss": 0.1115, "step": 4372 }, { "epoch": 24.569014084507042, "grad_norm": 0.8656513690948486, "learning_rate": 1.311e-05, "loss": 0.1793, "step": 4373 }, { "epoch": 24.574647887323945, "grad_norm": 0.545214831829071, "learning_rate": 1.3113e-05, "loss": 0.1095, "step": 4374 }, { "epoch": 24.580281690140843, "grad_norm": 0.48915979266166687, "learning_rate": 1.3116e-05, "loss": 0.1097, "step": 4375 }, { "epoch": 24.585915492957746, "grad_norm": 0.5279445648193359, "learning_rate": 1.3119000000000001e-05, "loss": 0.0798, "step": 4376 }, { "epoch": 24.591549295774648, "grad_norm": 0.5195039510726929, "learning_rate": 1.3122e-05, "loss": 0.0716, "step": 4377 }, { "epoch": 24.59718309859155, "grad_norm": 0.5286417603492737, "learning_rate": 1.3125e-05, "loss": 0.0853, "step": 4378 }, { "epoch": 24.60281690140845, "grad_norm": 0.4878678023815155, "learning_rate": 1.3128e-05, "loss": 0.0788, "step": 4379 }, { "epoch": 24.60845070422535, "grad_norm": 1.4464921951293945, "learning_rate": 1.3131e-05, "loss": 0.0463, "step": 4380 }, { "epoch": 24.614084507042254, "grad_norm": 0.5908610820770264, "learning_rate": 1.3134000000000002e-05, "loss": 0.0943, "step": 4381 }, { "epoch": 24.619718309859156, "grad_norm": 0.7351853251457214, "learning_rate": 1.3137000000000001e-05, "loss": 0.0927, "step": 4382 }, { "epoch": 24.625352112676055, "grad_norm": 0.7078545093536377, "learning_rate": 1.314e-05, 
"loss": 0.0917, "step": 4383 }, { "epoch": 24.630985915492957, "grad_norm": 0.7047293186187744, "learning_rate": 1.3143e-05, "loss": 0.0493, "step": 4384 }, { "epoch": 24.63661971830986, "grad_norm": 0.4532141089439392, "learning_rate": 1.3146e-05, "loss": 0.057, "step": 4385 }, { "epoch": 24.64225352112676, "grad_norm": 0.4082725942134857, "learning_rate": 1.3149e-05, "loss": 0.0555, "step": 4386 }, { "epoch": 24.647887323943664, "grad_norm": 0.5457994341850281, "learning_rate": 1.3152e-05, "loss": 0.0445, "step": 4387 }, { "epoch": 24.653521126760563, "grad_norm": 0.5672821402549744, "learning_rate": 1.3155e-05, "loss": 0.047, "step": 4388 }, { "epoch": 24.659154929577465, "grad_norm": 0.7905741930007935, "learning_rate": 1.3158e-05, "loss": 0.0358, "step": 4389 }, { "epoch": 24.664788732394367, "grad_norm": 0.7486527562141418, "learning_rate": 1.3161e-05, "loss": 0.068, "step": 4390 }, { "epoch": 24.670422535211266, "grad_norm": 0.4403298497200012, "learning_rate": 1.3164000000000001e-05, "loss": 0.0331, "step": 4391 }, { "epoch": 24.676056338028168, "grad_norm": 0.48020872473716736, "learning_rate": 1.3167000000000001e-05, "loss": 0.0421, "step": 4392 }, { "epoch": 24.68169014084507, "grad_norm": 0.4779815971851349, "learning_rate": 1.3170000000000001e-05, "loss": 0.1019, "step": 4393 }, { "epoch": 24.687323943661973, "grad_norm": 0.8589065074920654, "learning_rate": 1.3173e-05, "loss": 0.0908, "step": 4394 }, { "epoch": 24.692957746478875, "grad_norm": 0.4241272211074829, "learning_rate": 1.3175999999999999e-05, "loss": 0.0604, "step": 4395 }, { "epoch": 24.698591549295774, "grad_norm": 0.5782935619354248, "learning_rate": 1.3179e-05, "loss": 0.0489, "step": 4396 }, { "epoch": 24.704225352112676, "grad_norm": 0.5078334808349609, "learning_rate": 1.3182e-05, "loss": 0.0336, "step": 4397 }, { "epoch": 24.70985915492958, "grad_norm": 0.45001325011253357, "learning_rate": 1.3185e-05, "loss": 0.0334, "step": 4398 }, { "epoch": 24.71549295774648, "grad_norm": 
0.6733123660087585, "learning_rate": 1.3188e-05, "loss": 0.0412, "step": 4399 }, { "epoch": 24.72112676056338, "grad_norm": 0.6495485901832581, "learning_rate": 1.3191e-05, "loss": 0.0395, "step": 4400 }, { "epoch": 24.72676056338028, "grad_norm": 0.666325569152832, "learning_rate": 1.3194000000000001e-05, "loss": 0.0318, "step": 4401 }, { "epoch": 24.732394366197184, "grad_norm": 0.5263110995292664, "learning_rate": 1.3197000000000001e-05, "loss": 0.0556, "step": 4402 }, { "epoch": 24.738028169014086, "grad_norm": 0.7274307608604431, "learning_rate": 1.32e-05, "loss": 0.1255, "step": 4403 }, { "epoch": 24.743661971830985, "grad_norm": 0.5420170426368713, "learning_rate": 1.3203e-05, "loss": 0.0254, "step": 4404 }, { "epoch": 24.749295774647887, "grad_norm": 0.7367788553237915, "learning_rate": 1.3206e-05, "loss": 0.1936, "step": 4405 }, { "epoch": 24.75492957746479, "grad_norm": 0.6993264555931091, "learning_rate": 1.3209000000000002e-05, "loss": 0.1749, "step": 4406 }, { "epoch": 24.760563380281692, "grad_norm": 0.7488996386528015, "learning_rate": 1.3212000000000002e-05, "loss": 0.1654, "step": 4407 }, { "epoch": 24.76619718309859, "grad_norm": 0.8318788409233093, "learning_rate": 1.3215e-05, "loss": 0.1918, "step": 4408 }, { "epoch": 24.771830985915493, "grad_norm": 0.7478909492492676, "learning_rate": 1.3218e-05, "loss": 0.118, "step": 4409 }, { "epoch": 24.777464788732395, "grad_norm": 0.6711634397506714, "learning_rate": 1.3221e-05, "loss": 0.1246, "step": 4410 }, { "epoch": 24.783098591549297, "grad_norm": 0.5322163105010986, "learning_rate": 1.3224e-05, "loss": 0.143, "step": 4411 }, { "epoch": 24.788732394366196, "grad_norm": 0.5422439575195312, "learning_rate": 1.3227e-05, "loss": 0.089, "step": 4412 }, { "epoch": 24.7943661971831, "grad_norm": 0.6988456845283508, "learning_rate": 1.323e-05, "loss": 0.1154, "step": 4413 }, { "epoch": 24.8, "grad_norm": 0.570626974105835, "learning_rate": 1.3233e-05, "loss": 0.1089, "step": 4414 }, { "epoch": 
24.805633802816903, "grad_norm": 0.676938533782959, "learning_rate": 1.3236e-05, "loss": 0.1211, "step": 4415 }, { "epoch": 24.8112676056338, "grad_norm": 0.7090728878974915, "learning_rate": 1.3239000000000001e-05, "loss": 0.0827, "step": 4416 }, { "epoch": 24.816901408450704, "grad_norm": 0.5821860432624817, "learning_rate": 1.3242000000000001e-05, "loss": 0.1176, "step": 4417 }, { "epoch": 24.822535211267606, "grad_norm": 1.5881747007369995, "learning_rate": 1.3245000000000001e-05, "loss": 0.1114, "step": 4418 }, { "epoch": 24.82816901408451, "grad_norm": 0.5897836685180664, "learning_rate": 1.3248000000000001e-05, "loss": 0.1217, "step": 4419 }, { "epoch": 24.833802816901407, "grad_norm": 0.6595442295074463, "learning_rate": 1.3250999999999999e-05, "loss": 0.1035, "step": 4420 }, { "epoch": 24.83943661971831, "grad_norm": 0.5533490180969238, "learning_rate": 1.3254e-05, "loss": 0.0679, "step": 4421 }, { "epoch": 24.845070422535212, "grad_norm": 0.5174744725227356, "learning_rate": 1.3257e-05, "loss": 0.0574, "step": 4422 }, { "epoch": 24.850704225352114, "grad_norm": 0.5672239065170288, "learning_rate": 1.326e-05, "loss": 0.0685, "step": 4423 }, { "epoch": 24.856338028169013, "grad_norm": 0.7417545914649963, "learning_rate": 1.3263e-05, "loss": 0.0583, "step": 4424 }, { "epoch": 24.861971830985915, "grad_norm": 0.4661579132080078, "learning_rate": 1.3266e-05, "loss": 0.0702, "step": 4425 }, { "epoch": 24.867605633802818, "grad_norm": 0.7520685791969299, "learning_rate": 1.3269000000000001e-05, "loss": 0.0989, "step": 4426 }, { "epoch": 24.87323943661972, "grad_norm": 0.9018814563751221, "learning_rate": 1.3272000000000001e-05, "loss": 0.0916, "step": 4427 }, { "epoch": 24.87887323943662, "grad_norm": 0.5917441248893738, "learning_rate": 1.3275e-05, "loss": 0.0559, "step": 4428 }, { "epoch": 24.88450704225352, "grad_norm": 0.4581385552883148, "learning_rate": 1.3278e-05, "loss": 0.0439, "step": 4429 }, { "epoch": 24.890140845070423, "grad_norm": 
0.48289430141448975, "learning_rate": 1.3281e-05, "loss": 0.0597, "step": 4430 }, { "epoch": 24.895774647887325, "grad_norm": 0.7069322466850281, "learning_rate": 1.3284000000000002e-05, "loss": 0.0718, "step": 4431 }, { "epoch": 24.901408450704224, "grad_norm": 0.4155913293361664, "learning_rate": 1.3287e-05, "loss": 0.048, "step": 4432 }, { "epoch": 24.907042253521126, "grad_norm": 0.5794140100479126, "learning_rate": 1.329e-05, "loss": 0.0564, "step": 4433 }, { "epoch": 24.91267605633803, "grad_norm": 0.6205551624298096, "learning_rate": 1.3293e-05, "loss": 0.1184, "step": 4434 }, { "epoch": 24.91830985915493, "grad_norm": 0.508327066898346, "learning_rate": 1.3296e-05, "loss": 0.0317, "step": 4435 }, { "epoch": 24.92394366197183, "grad_norm": 0.6701633334159851, "learning_rate": 1.3299000000000001e-05, "loss": 0.0605, "step": 4436 }, { "epoch": 24.929577464788732, "grad_norm": 0.7341475486755371, "learning_rate": 1.3302e-05, "loss": 0.0585, "step": 4437 }, { "epoch": 24.935211267605634, "grad_norm": 1.4131909608840942, "learning_rate": 1.3305e-05, "loss": 0.0864, "step": 4438 }, { "epoch": 24.940845070422537, "grad_norm": 0.46392202377319336, "learning_rate": 1.3308e-05, "loss": 0.0448, "step": 4439 }, { "epoch": 24.946478873239435, "grad_norm": 0.6909957528114319, "learning_rate": 1.3311e-05, "loss": 0.0321, "step": 4440 }, { "epoch": 24.952112676056338, "grad_norm": 1.0498911142349243, "learning_rate": 1.3314e-05, "loss": 0.078, "step": 4441 }, { "epoch": 24.95774647887324, "grad_norm": 0.5083705186843872, "learning_rate": 1.3317000000000001e-05, "loss": 0.0361, "step": 4442 }, { "epoch": 24.963380281690142, "grad_norm": 0.8349608182907104, "learning_rate": 1.3320000000000001e-05, "loss": 0.0997, "step": 4443 }, { "epoch": 24.96901408450704, "grad_norm": 0.5595399141311646, "learning_rate": 1.3323000000000001e-05, "loss": 0.051, "step": 4444 }, { "epoch": 24.974647887323943, "grad_norm": 0.4547126293182373, "learning_rate": 1.3325999999999999e-05, "loss": 
0.0253, "step": 4445 }, { "epoch": 24.980281690140846, "grad_norm": 0.6131773591041565, "learning_rate": 1.3328999999999999e-05, "loss": 0.0656, "step": 4446 }, { "epoch": 24.985915492957748, "grad_norm": 0.40141838788986206, "learning_rate": 1.3332e-05, "loss": 0.0207, "step": 4447 }, { "epoch": 24.991549295774647, "grad_norm": 1.2533061504364014, "learning_rate": 1.3335e-05, "loss": 0.0506, "step": 4448 }, { "epoch": 24.99718309859155, "grad_norm": 0.6282413601875305, "learning_rate": 1.3338e-05, "loss": 0.0939, "step": 4449 }, { "epoch": 25.0, "grad_norm": 0.837709367275238, "learning_rate": 1.3341e-05, "loss": 0.0485, "step": 4450 }, { "epoch": 25.005633802816902, "grad_norm": 0.9622279405593872, "learning_rate": 1.3344e-05, "loss": 0.2745, "step": 4451 }, { "epoch": 25.011267605633805, "grad_norm": 0.677422046661377, "learning_rate": 1.3347000000000001e-05, "loss": 0.1452, "step": 4452 }, { "epoch": 25.016901408450703, "grad_norm": 0.6199139356613159, "learning_rate": 1.3350000000000001e-05, "loss": 0.1375, "step": 4453 }, { "epoch": 25.022535211267606, "grad_norm": 0.9369131326675415, "learning_rate": 1.3353e-05, "loss": 0.1164, "step": 4454 }, { "epoch": 25.028169014084508, "grad_norm": 0.8229767084121704, "learning_rate": 1.3356e-05, "loss": 0.1961, "step": 4455 }, { "epoch": 25.03380281690141, "grad_norm": 0.7346138954162598, "learning_rate": 1.3359e-05, "loss": 0.1172, "step": 4456 }, { "epoch": 25.03943661971831, "grad_norm": 0.6695093512535095, "learning_rate": 1.3362e-05, "loss": 0.1604, "step": 4457 }, { "epoch": 25.04507042253521, "grad_norm": 0.6114653944969177, "learning_rate": 1.3365e-05, "loss": 0.1003, "step": 4458 }, { "epoch": 25.050704225352113, "grad_norm": 0.5808371901512146, "learning_rate": 1.3368e-05, "loss": 0.1023, "step": 4459 }, { "epoch": 25.056338028169016, "grad_norm": 0.5583010911941528, "learning_rate": 1.3371e-05, "loss": 0.0755, "step": 4460 }, { "epoch": 25.061971830985915, "grad_norm": 0.610510528087616, "learning_rate": 
1.3374e-05, "loss": 0.1119, "step": 4461 }, { "epoch": 25.067605633802817, "grad_norm": 0.7616140246391296, "learning_rate": 1.3377e-05, "loss": 0.0996, "step": 4462 }, { "epoch": 25.07323943661972, "grad_norm": 0.6463656425476074, "learning_rate": 1.338e-05, "loss": 0.0987, "step": 4463 }, { "epoch": 25.07887323943662, "grad_norm": 0.5837895274162292, "learning_rate": 1.3383e-05, "loss": 0.081, "step": 4464 }, { "epoch": 25.08450704225352, "grad_norm": 0.39972352981567383, "learning_rate": 1.3386e-05, "loss": 0.0716, "step": 4465 }, { "epoch": 25.090140845070422, "grad_norm": 0.6965437531471252, "learning_rate": 1.3389e-05, "loss": 0.0885, "step": 4466 }, { "epoch": 25.095774647887325, "grad_norm": 0.47353461384773254, "learning_rate": 1.3392000000000002e-05, "loss": 0.0434, "step": 4467 }, { "epoch": 25.101408450704227, "grad_norm": 0.5184068083763123, "learning_rate": 1.3395000000000001e-05, "loss": 0.0711, "step": 4468 }, { "epoch": 25.107042253521126, "grad_norm": 0.5558045506477356, "learning_rate": 1.3398e-05, "loss": 0.0546, "step": 4469 }, { "epoch": 25.112676056338028, "grad_norm": 0.46766766905784607, "learning_rate": 1.3401e-05, "loss": 0.037, "step": 4470 }, { "epoch": 25.11830985915493, "grad_norm": 0.6894230842590332, "learning_rate": 1.3403999999999999e-05, "loss": 0.079, "step": 4471 }, { "epoch": 25.123943661971833, "grad_norm": 0.5659441947937012, "learning_rate": 1.3407e-05, "loss": 0.0699, "step": 4472 }, { "epoch": 25.12957746478873, "grad_norm": 0.538540244102478, "learning_rate": 1.341e-05, "loss": 0.0715, "step": 4473 }, { "epoch": 25.135211267605634, "grad_norm": 0.5424286127090454, "learning_rate": 1.3413e-05, "loss": 0.0441, "step": 4474 }, { "epoch": 25.140845070422536, "grad_norm": 0.540650486946106, "learning_rate": 1.3416e-05, "loss": 0.0432, "step": 4475 }, { "epoch": 25.146478873239438, "grad_norm": 1.0275204181671143, "learning_rate": 1.3419e-05, "loss": 0.0808, "step": 4476 }, { "epoch": 25.152112676056337, "grad_norm": 
1.0049437284469604, "learning_rate": 1.3422000000000001e-05, "loss": 0.1225, "step": 4477 }, { "epoch": 25.15774647887324, "grad_norm": 0.5381529927253723, "learning_rate": 1.3425000000000001e-05, "loss": 0.0431, "step": 4478 }, { "epoch": 25.16338028169014, "grad_norm": 0.5191807150840759, "learning_rate": 1.3428000000000001e-05, "loss": 0.0565, "step": 4479 }, { "epoch": 25.169014084507044, "grad_norm": 0.7200739979743958, "learning_rate": 1.3431e-05, "loss": 0.0657, "step": 4480 }, { "epoch": 25.174647887323943, "grad_norm": 0.6347545385360718, "learning_rate": 1.3433999999999999e-05, "loss": 0.0881, "step": 4481 }, { "epoch": 25.180281690140845, "grad_norm": 0.5677999258041382, "learning_rate": 1.3437e-05, "loss": 0.0283, "step": 4482 }, { "epoch": 25.185915492957747, "grad_norm": 0.46137863397598267, "learning_rate": 1.344e-05, "loss": 0.0302, "step": 4483 }, { "epoch": 25.19154929577465, "grad_norm": 0.37847426533699036, "learning_rate": 1.3443e-05, "loss": 0.0605, "step": 4484 }, { "epoch": 25.197183098591548, "grad_norm": 0.41681990027427673, "learning_rate": 1.3446e-05, "loss": 0.0348, "step": 4485 }, { "epoch": 25.20281690140845, "grad_norm": 0.4736464321613312, "learning_rate": 1.3449e-05, "loss": 0.0686, "step": 4486 }, { "epoch": 25.208450704225353, "grad_norm": 0.5821016430854797, "learning_rate": 1.3452000000000001e-05, "loss": 0.0572, "step": 4487 }, { "epoch": 25.214084507042255, "grad_norm": 0.48573997616767883, "learning_rate": 1.3455e-05, "loss": 0.0667, "step": 4488 }, { "epoch": 25.219718309859154, "grad_norm": 0.5718627572059631, "learning_rate": 1.3458e-05, "loss": 0.0697, "step": 4489 }, { "epoch": 25.225352112676056, "grad_norm": 0.4592345952987671, "learning_rate": 1.3461e-05, "loss": 0.0476, "step": 4490 }, { "epoch": 25.23098591549296, "grad_norm": 0.505743145942688, "learning_rate": 1.3464e-05, "loss": 0.0204, "step": 4491 }, { "epoch": 25.23661971830986, "grad_norm": 0.4647597372531891, "learning_rate": 1.3467000000000002e-05, "loss": 
0.0361, "step": 4492 }, { "epoch": 25.24225352112676, "grad_norm": 0.8321630954742432, "learning_rate": 1.3470000000000001e-05, "loss": 0.1124, "step": 4493 }, { "epoch": 25.24788732394366, "grad_norm": 1.0013996362686157, "learning_rate": 1.3473e-05, "loss": 0.0387, "step": 4494 }, { "epoch": 25.253521126760564, "grad_norm": 2.0810232162475586, "learning_rate": 1.3476e-05, "loss": 0.1577, "step": 4495 }, { "epoch": 25.259154929577466, "grad_norm": 0.9085041880607605, "learning_rate": 1.3479e-05, "loss": 0.1611, "step": 4496 }, { "epoch": 25.264788732394365, "grad_norm": 0.7644361257553101, "learning_rate": 1.3482e-05, "loss": 0.1242, "step": 4497 }, { "epoch": 25.270422535211267, "grad_norm": 0.7962258458137512, "learning_rate": 1.3485e-05, "loss": 0.1819, "step": 4498 }, { "epoch": 25.27605633802817, "grad_norm": 0.6295580863952637, "learning_rate": 1.3488e-05, "loss": 0.1168, "step": 4499 }, { "epoch": 25.281690140845072, "grad_norm": 0.6330400705337524, "learning_rate": 1.3491e-05, "loss": 0.1493, "step": 4500 }, { "epoch": 25.28732394366197, "grad_norm": 0.7139936089515686, "learning_rate": 1.3494e-05, "loss": 0.1379, "step": 4501 }, { "epoch": 25.292957746478873, "grad_norm": 0.5338876247406006, "learning_rate": 1.3497000000000001e-05, "loss": 0.1167, "step": 4502 }, { "epoch": 25.298591549295775, "grad_norm": 0.7654548287391663, "learning_rate": 1.3500000000000001e-05, "loss": 0.15, "step": 4503 }, { "epoch": 25.304225352112677, "grad_norm": 0.6589233875274658, "learning_rate": 1.3503000000000001e-05, "loss": 0.0893, "step": 4504 }, { "epoch": 25.309859154929576, "grad_norm": 0.8717864751815796, "learning_rate": 1.3506e-05, "loss": 0.13, "step": 4505 }, { "epoch": 25.31549295774648, "grad_norm": 0.539639413356781, "learning_rate": 1.3508999999999999e-05, "loss": 0.0962, "step": 4506 }, { "epoch": 25.32112676056338, "grad_norm": 0.6103701591491699, "learning_rate": 1.3512e-05, "loss": 0.1526, "step": 4507 }, { "epoch": 25.326760563380283, "grad_norm": 
0.774753749370575, "learning_rate": 1.3515e-05, "loss": 0.103, "step": 4508 }, { "epoch": 25.33239436619718, "grad_norm": 0.628497302532196, "learning_rate": 1.3518e-05, "loss": 0.0677, "step": 4509 }, { "epoch": 25.338028169014084, "grad_norm": 0.7556802034378052, "learning_rate": 1.3521e-05, "loss": 0.1081, "step": 4510 }, { "epoch": 25.343661971830986, "grad_norm": 0.7775970697402954, "learning_rate": 1.3524e-05, "loss": 0.0951, "step": 4511 }, { "epoch": 25.34929577464789, "grad_norm": 0.6020277738571167, "learning_rate": 1.3527000000000001e-05, "loss": 0.0624, "step": 4512 }, { "epoch": 25.354929577464787, "grad_norm": 0.5365467071533203, "learning_rate": 1.3530000000000001e-05, "loss": 0.0663, "step": 4513 }, { "epoch": 25.36056338028169, "grad_norm": 0.4785071611404419, "learning_rate": 1.3533e-05, "loss": 0.0547, "step": 4514 }, { "epoch": 25.366197183098592, "grad_norm": 0.7020564675331116, "learning_rate": 1.3536e-05, "loss": 0.0835, "step": 4515 }, { "epoch": 25.371830985915494, "grad_norm": 0.5765169858932495, "learning_rate": 1.3539e-05, "loss": 0.0572, "step": 4516 }, { "epoch": 25.377464788732393, "grad_norm": 0.5757632255554199, "learning_rate": 1.3542000000000002e-05, "loss": 0.0521, "step": 4517 }, { "epoch": 25.383098591549295, "grad_norm": 0.5601092576980591, "learning_rate": 1.3545e-05, "loss": 0.0457, "step": 4518 }, { "epoch": 25.388732394366198, "grad_norm": 0.6664250493049622, "learning_rate": 1.3548e-05, "loss": 0.0558, "step": 4519 }, { "epoch": 25.3943661971831, "grad_norm": 0.7967733144760132, "learning_rate": 1.3551e-05, "loss": 0.0616, "step": 4520 }, { "epoch": 25.4, "grad_norm": 0.39730119705200195, "learning_rate": 1.3554e-05, "loss": 0.0293, "step": 4521 }, { "epoch": 25.4056338028169, "grad_norm": 0.5190859436988831, "learning_rate": 1.3557e-05, "loss": 0.0449, "step": 4522 }, { "epoch": 25.411267605633803, "grad_norm": 0.4077153205871582, "learning_rate": 1.356e-05, "loss": 0.0355, "step": 4523 }, { "epoch": 25.416901408450705, 
"grad_norm": 0.6936213374137878, "learning_rate": 1.3563e-05, "loss": 0.0718, "step": 4524 }, { "epoch": 25.422535211267604, "grad_norm": 0.4700467586517334, "learning_rate": 1.3566e-05, "loss": 0.0364, "step": 4525 }, { "epoch": 25.428169014084506, "grad_norm": 0.7089601755142212, "learning_rate": 1.3569e-05, "loss": 0.0432, "step": 4526 }, { "epoch": 25.43380281690141, "grad_norm": 0.5077939629554749, "learning_rate": 1.3572000000000002e-05, "loss": 0.0468, "step": 4527 }, { "epoch": 25.43943661971831, "grad_norm": 0.47599098086357117, "learning_rate": 1.3575000000000001e-05, "loss": 0.0696, "step": 4528 }, { "epoch": 25.44507042253521, "grad_norm": 0.3906319737434387, "learning_rate": 1.3578000000000001e-05, "loss": 0.0285, "step": 4529 }, { "epoch": 25.450704225352112, "grad_norm": 0.6554809808731079, "learning_rate": 1.3581000000000001e-05, "loss": 0.0372, "step": 4530 }, { "epoch": 25.456338028169014, "grad_norm": 0.6406766772270203, "learning_rate": 1.3583999999999999e-05, "loss": 0.0631, "step": 4531 }, { "epoch": 25.461971830985917, "grad_norm": 0.537358820438385, "learning_rate": 1.3587e-05, "loss": 0.0274, "step": 4532 }, { "epoch": 25.467605633802815, "grad_norm": 0.5403721928596497, "learning_rate": 1.359e-05, "loss": 0.0416, "step": 4533 }, { "epoch": 25.473239436619718, "grad_norm": 0.6334729194641113, "learning_rate": 1.3593e-05, "loss": 0.0585, "step": 4534 }, { "epoch": 25.47887323943662, "grad_norm": 0.7527462244033813, "learning_rate": 1.3596e-05, "loss": 0.0337, "step": 4535 }, { "epoch": 25.484507042253522, "grad_norm": 0.8371369242668152, "learning_rate": 1.3599e-05, "loss": 0.0506, "step": 4536 }, { "epoch": 25.49014084507042, "grad_norm": 0.6400368213653564, "learning_rate": 1.3602000000000001e-05, "loss": 0.0663, "step": 4537 }, { "epoch": 25.495774647887323, "grad_norm": 0.5540046691894531, "learning_rate": 1.3605000000000001e-05, "loss": 0.0384, "step": 4538 }, { "epoch": 25.501408450704226, "grad_norm": 0.6911896467208862, 
"learning_rate": 1.3608e-05, "loss": 0.1753, "step": 4539 }, { "epoch": 25.507042253521128, "grad_norm": 0.705941915512085, "learning_rate": 1.3611e-05, "loss": 0.1445, "step": 4540 }, { "epoch": 25.512676056338027, "grad_norm": 0.560214102268219, "learning_rate": 1.3614e-05, "loss": 0.1161, "step": 4541 }, { "epoch": 25.51830985915493, "grad_norm": 0.6482544541358948, "learning_rate": 1.3617000000000002e-05, "loss": 0.1614, "step": 4542 }, { "epoch": 25.52394366197183, "grad_norm": 0.7832469344139099, "learning_rate": 1.362e-05, "loss": 0.1425, "step": 4543 }, { "epoch": 25.529577464788733, "grad_norm": 0.5709983706474304, "learning_rate": 1.3623e-05, "loss": 0.0969, "step": 4544 }, { "epoch": 25.535211267605632, "grad_norm": 0.6297059655189514, "learning_rate": 1.3626e-05, "loss": 0.1077, "step": 4545 }, { "epoch": 25.540845070422534, "grad_norm": 0.7938288450241089, "learning_rate": 1.3629e-05, "loss": 0.2168, "step": 4546 }, { "epoch": 25.546478873239437, "grad_norm": 0.7782179117202759, "learning_rate": 1.3632000000000001e-05, "loss": 0.1172, "step": 4547 }, { "epoch": 25.55211267605634, "grad_norm": 0.6576704382896423, "learning_rate": 1.3635e-05, "loss": 0.101, "step": 4548 }, { "epoch": 25.557746478873238, "grad_norm": 0.5366101861000061, "learning_rate": 1.3638e-05, "loss": 0.0964, "step": 4549 }, { "epoch": 25.56338028169014, "grad_norm": 0.6728852391242981, "learning_rate": 1.3641e-05, "loss": 0.0994, "step": 4550 }, { "epoch": 25.569014084507042, "grad_norm": 0.7848027348518372, "learning_rate": 1.3644e-05, "loss": 0.1154, "step": 4551 }, { "epoch": 25.574647887323945, "grad_norm": 0.5365052819252014, "learning_rate": 1.3647000000000002e-05, "loss": 0.0638, "step": 4552 }, { "epoch": 25.580281690140843, "grad_norm": 0.7895044088363647, "learning_rate": 1.3650000000000001e-05, "loss": 0.0867, "step": 4553 }, { "epoch": 25.585915492957746, "grad_norm": 0.6562526822090149, "learning_rate": 1.3653000000000001e-05, "loss": 0.1192, "step": 4554 }, { "epoch": 
25.591549295774648, "grad_norm": 0.610165536403656, "learning_rate": 1.3656e-05, "loss": 0.0671, "step": 4555 }, { "epoch": 25.59718309859155, "grad_norm": 0.5959428548812866, "learning_rate": 1.3659e-05, "loss": 0.1009, "step": 4556 }, { "epoch": 25.60281690140845, "grad_norm": 0.5726417303085327, "learning_rate": 1.3662e-05, "loss": 0.0833, "step": 4557 }, { "epoch": 25.60845070422535, "grad_norm": 0.5875435471534729, "learning_rate": 1.3665e-05, "loss": 0.0702, "step": 4558 }, { "epoch": 25.614084507042254, "grad_norm": 0.685372531414032, "learning_rate": 1.3668e-05, "loss": 0.0664, "step": 4559 }, { "epoch": 25.619718309859156, "grad_norm": 0.8147499561309814, "learning_rate": 1.3671e-05, "loss": 0.0754, "step": 4560 }, { "epoch": 25.625352112676055, "grad_norm": 1.0419206619262695, "learning_rate": 1.3674e-05, "loss": 0.1155, "step": 4561 }, { "epoch": 25.630985915492957, "grad_norm": 0.6940472722053528, "learning_rate": 1.3677000000000001e-05, "loss": 0.0392, "step": 4562 }, { "epoch": 25.63661971830986, "grad_norm": 0.4736436903476715, "learning_rate": 1.3680000000000001e-05, "loss": 0.0711, "step": 4563 }, { "epoch": 25.64225352112676, "grad_norm": 0.6038382649421692, "learning_rate": 1.3683000000000001e-05, "loss": 0.0873, "step": 4564 }, { "epoch": 25.647887323943664, "grad_norm": 0.6998777985572815, "learning_rate": 1.3686e-05, "loss": 0.0755, "step": 4565 }, { "epoch": 25.653521126760563, "grad_norm": 0.5477784276008606, "learning_rate": 1.3689e-05, "loss": 0.038, "step": 4566 }, { "epoch": 25.659154929577465, "grad_norm": 0.3920752704143524, "learning_rate": 1.3691999999999999e-05, "loss": 0.0294, "step": 4567 }, { "epoch": 25.664788732394367, "grad_norm": 0.6524912714958191, "learning_rate": 1.3695e-05, "loss": 0.0839, "step": 4568 }, { "epoch": 25.670422535211266, "grad_norm": 0.5302411913871765, "learning_rate": 1.3698e-05, "loss": 0.0373, "step": 4569 }, { "epoch": 25.676056338028168, "grad_norm": 0.4632780849933624, "learning_rate": 1.3701e-05, 
"loss": 0.0707, "step": 4570 }, { "epoch": 25.68169014084507, "grad_norm": 0.46422308683395386, "learning_rate": 1.3704e-05, "loss": 0.0909, "step": 4571 }, { "epoch": 25.687323943661973, "grad_norm": 0.6508498191833496, "learning_rate": 1.3707e-05, "loss": 0.0847, "step": 4572 }, { "epoch": 25.692957746478875, "grad_norm": 0.5306347012519836, "learning_rate": 1.3710000000000001e-05, "loss": 0.0701, "step": 4573 }, { "epoch": 25.698591549295774, "grad_norm": 1.135103464126587, "learning_rate": 1.3713e-05, "loss": 0.084, "step": 4574 }, { "epoch": 25.704225352112676, "grad_norm": 0.6349331736564636, "learning_rate": 1.3716e-05, "loss": 0.0504, "step": 4575 }, { "epoch": 25.70985915492958, "grad_norm": 0.6101451516151428, "learning_rate": 1.3719e-05, "loss": 0.0274, "step": 4576 }, { "epoch": 25.71549295774648, "grad_norm": 0.6529644727706909, "learning_rate": 1.3722e-05, "loss": 0.0431, "step": 4577 }, { "epoch": 25.72112676056338, "grad_norm": 0.9308462142944336, "learning_rate": 1.3725000000000002e-05, "loss": 0.1302, "step": 4578 }, { "epoch": 25.72676056338028, "grad_norm": 0.4346456527709961, "learning_rate": 1.3728000000000001e-05, "loss": 0.0413, "step": 4579 }, { "epoch": 25.732394366197184, "grad_norm": 0.714582622051239, "learning_rate": 1.3731e-05, "loss": 0.0464, "step": 4580 }, { "epoch": 25.738028169014086, "grad_norm": 0.6451097726821899, "learning_rate": 1.3734e-05, "loss": 0.0449, "step": 4581 }, { "epoch": 25.743661971830985, "grad_norm": 0.8295645713806152, "learning_rate": 1.3736999999999999e-05, "loss": 0.0567, "step": 4582 }, { "epoch": 25.749295774647887, "grad_norm": 0.7797640562057495, "learning_rate": 1.374e-05, "loss": 0.1665, "step": 4583 }, { "epoch": 25.75492957746479, "grad_norm": 1.3777813911437988, "learning_rate": 1.3743e-05, "loss": 0.158, "step": 4584 }, { "epoch": 25.760563380281692, "grad_norm": 0.7480785846710205, "learning_rate": 1.3746e-05, "loss": 0.1911, "step": 4585 }, { "epoch": 25.76619718309859, "grad_norm": 
0.7867670059204102, "learning_rate": 1.3749e-05, "loss": 0.1337, "step": 4586 }, { "epoch": 25.771830985915493, "grad_norm": 0.5981540679931641, "learning_rate": 1.3752e-05, "loss": 0.1181, "step": 4587 }, { "epoch": 25.777464788732395, "grad_norm": 1.1376851797103882, "learning_rate": 1.3755000000000001e-05, "loss": 0.1404, "step": 4588 }, { "epoch": 25.783098591549297, "grad_norm": 0.7264759540557861, "learning_rate": 1.3758000000000001e-05, "loss": 0.1038, "step": 4589 }, { "epoch": 25.788732394366196, "grad_norm": 0.8542400598526001, "learning_rate": 1.3761000000000001e-05, "loss": 0.1235, "step": 4590 }, { "epoch": 25.7943661971831, "grad_norm": 0.7897334098815918, "learning_rate": 1.3764e-05, "loss": 0.1104, "step": 4591 }, { "epoch": 25.8, "grad_norm": 0.574367880821228, "learning_rate": 1.3766999999999999e-05, "loss": 0.0731, "step": 4592 }, { "epoch": 25.805633802816903, "grad_norm": 0.6261805295944214, "learning_rate": 1.377e-05, "loss": 0.1464, "step": 4593 }, { "epoch": 25.8112676056338, "grad_norm": 0.5870782732963562, "learning_rate": 1.3773e-05, "loss": 0.0858, "step": 4594 }, { "epoch": 25.816901408450704, "grad_norm": 0.7116224765777588, "learning_rate": 1.3776e-05, "loss": 0.1335, "step": 4595 }, { "epoch": 25.822535211267606, "grad_norm": 0.919532060623169, "learning_rate": 1.3779e-05, "loss": 0.109, "step": 4596 }, { "epoch": 25.82816901408451, "grad_norm": 1.067037582397461, "learning_rate": 1.3782e-05, "loss": 0.1152, "step": 4597 }, { "epoch": 25.833802816901407, "grad_norm": 0.6982028484344482, "learning_rate": 1.3785000000000001e-05, "loss": 0.0488, "step": 4598 }, { "epoch": 25.83943661971831, "grad_norm": 0.8057355880737305, "learning_rate": 1.3788e-05, "loss": 0.1255, "step": 4599 }, { "epoch": 25.845070422535212, "grad_norm": 0.7116346955299377, "learning_rate": 1.3791e-05, "loss": 0.0865, "step": 4600 }, { "epoch": 25.850704225352114, "grad_norm": 0.7585561871528625, "learning_rate": 1.3794e-05, "loss": 0.0767, "step": 4601 }, { 
"epoch": 25.856338028169013, "grad_norm": 1.8033246994018555, "learning_rate": 1.3797e-05, "loss": 0.0867, "step": 4602 }, { "epoch": 25.861971830985915, "grad_norm": 0.8122465014457703, "learning_rate": 1.3800000000000002e-05, "loss": 0.0864, "step": 4603 }, { "epoch": 25.867605633802818, "grad_norm": 0.5065032839775085, "learning_rate": 1.3803e-05, "loss": 0.0688, "step": 4604 }, { "epoch": 25.87323943661972, "grad_norm": 0.6459835171699524, "learning_rate": 1.3806e-05, "loss": 0.1014, "step": 4605 }, { "epoch": 25.87887323943662, "grad_norm": 0.6896846890449524, "learning_rate": 1.3809e-05, "loss": 0.0552, "step": 4606 }, { "epoch": 25.88450704225352, "grad_norm": 1.152369499206543, "learning_rate": 1.3812e-05, "loss": 0.0706, "step": 4607 }, { "epoch": 25.890140845070423, "grad_norm": 0.6657989621162415, "learning_rate": 1.3815e-05, "loss": 0.0394, "step": 4608 }, { "epoch": 25.895774647887325, "grad_norm": 0.5068500638008118, "learning_rate": 1.3818e-05, "loss": 0.0521, "step": 4609 }, { "epoch": 25.901408450704224, "grad_norm": 0.7908946871757507, "learning_rate": 1.3821e-05, "loss": 0.0517, "step": 4610 }, { "epoch": 25.907042253521126, "grad_norm": 0.7849494218826294, "learning_rate": 1.3824e-05, "loss": 0.0494, "step": 4611 }, { "epoch": 25.91267605633803, "grad_norm": 0.460408478975296, "learning_rate": 1.3827e-05, "loss": 0.046, "step": 4612 }, { "epoch": 25.91830985915493, "grad_norm": 0.5860891938209534, "learning_rate": 1.3830000000000001e-05, "loss": 0.0316, "step": 4613 }, { "epoch": 25.92394366197183, "grad_norm": 0.9372726678848267, "learning_rate": 1.3833000000000001e-05, "loss": 0.0877, "step": 4614 }, { "epoch": 25.929577464788732, "grad_norm": 0.7353770732879639, "learning_rate": 1.3836000000000001e-05, "loss": 0.0756, "step": 4615 }, { "epoch": 25.935211267605634, "grad_norm": 0.6797667145729065, "learning_rate": 1.3839e-05, "loss": 0.0623, "step": 4616 }, { "epoch": 25.940845070422537, "grad_norm": 0.7750339508056641, "learning_rate": 
1.3841999999999999e-05, "loss": 0.0824, "step": 4617 }, { "epoch": 25.946478873239435, "grad_norm": 0.46512365341186523, "learning_rate": 1.3845e-05, "loss": 0.0422, "step": 4618 }, { "epoch": 25.952112676056338, "grad_norm": 0.7724330425262451, "learning_rate": 1.3848e-05, "loss": 0.0582, "step": 4619 }, { "epoch": 25.95774647887324, "grad_norm": 0.4830438196659088, "learning_rate": 1.3851e-05, "loss": 0.0214, "step": 4620 }, { "epoch": 25.963380281690142, "grad_norm": 0.3577374815940857, "learning_rate": 1.3854e-05, "loss": 0.0183, "step": 4621 }, { "epoch": 25.96901408450704, "grad_norm": 0.2956916093826294, "learning_rate": 1.3857e-05, "loss": 0.0169, "step": 4622 }, { "epoch": 25.974647887323943, "grad_norm": 0.5505967140197754, "learning_rate": 1.3860000000000001e-05, "loss": 0.0234, "step": 4623 }, { "epoch": 25.980281690140846, "grad_norm": 0.4710119962692261, "learning_rate": 1.3863000000000001e-05, "loss": 0.0366, "step": 4624 }, { "epoch": 25.985915492957748, "grad_norm": 0.34738510847091675, "learning_rate": 1.3866e-05, "loss": 0.0204, "step": 4625 }, { "epoch": 25.991549295774647, "grad_norm": 0.6722044348716736, "learning_rate": 1.3869e-05, "loss": 0.0254, "step": 4626 }, { "epoch": 25.99718309859155, "grad_norm": 1.251830816268921, "learning_rate": 1.3872e-05, "loss": 0.1025, "step": 4627 }, { "epoch": 26.0, "grad_norm": 0.8064362406730652, "learning_rate": 1.3875000000000002e-05, "loss": 0.0205, "step": 4628 }, { "epoch": 26.005633802816902, "grad_norm": 0.7523513436317444, "learning_rate": 1.3878e-05, "loss": 0.1805, "step": 4629 }, { "epoch": 26.011267605633805, "grad_norm": 0.9175093173980713, "learning_rate": 1.3881e-05, "loss": 0.1642, "step": 4630 }, { "epoch": 26.016901408450703, "grad_norm": 0.8630015850067139, "learning_rate": 1.3884e-05, "loss": 0.1189, "step": 4631 }, { "epoch": 26.022535211267606, "grad_norm": 0.5693211555480957, "learning_rate": 1.3887e-05, "loss": 0.0877, "step": 4632 }, { "epoch": 26.028169014084508, "grad_norm": 
0.7993447184562683, "learning_rate": 1.389e-05, "loss": 0.1326, "step": 4633 }, { "epoch": 26.03380281690141, "grad_norm": 0.7916780710220337, "learning_rate": 1.3893e-05, "loss": 0.1481, "step": 4634 }, { "epoch": 26.03943661971831, "grad_norm": 1.1048345565795898, "learning_rate": 1.3896e-05, "loss": 0.1325, "step": 4635 }, { "epoch": 26.04507042253521, "grad_norm": 0.5347261428833008, "learning_rate": 1.3899e-05, "loss": 0.0786, "step": 4636 }, { "epoch": 26.050704225352113, "grad_norm": 0.6480002999305725, "learning_rate": 1.3902e-05, "loss": 0.0952, "step": 4637 }, { "epoch": 26.056338028169016, "grad_norm": 0.6551711559295654, "learning_rate": 1.3905000000000002e-05, "loss": 0.1151, "step": 4638 }, { "epoch": 26.061971830985915, "grad_norm": 0.5891906023025513, "learning_rate": 1.3908000000000001e-05, "loss": 0.0931, "step": 4639 }, { "epoch": 26.067605633802817, "grad_norm": 0.8060526251792908, "learning_rate": 1.3911000000000001e-05, "loss": 0.0832, "step": 4640 }, { "epoch": 26.07323943661972, "grad_norm": 1.0945286750793457, "learning_rate": 1.3914e-05, "loss": 0.1706, "step": 4641 }, { "epoch": 26.07887323943662, "grad_norm": 3.0055391788482666, "learning_rate": 1.3916999999999999e-05, "loss": 0.0796, "step": 4642 }, { "epoch": 26.08450704225352, "grad_norm": 0.5739682912826538, "learning_rate": 1.392e-05, "loss": 0.0573, "step": 4643 }, { "epoch": 26.090140845070422, "grad_norm": 0.6680814027786255, "learning_rate": 1.3923e-05, "loss": 0.0981, "step": 4644 }, { "epoch": 26.095774647887325, "grad_norm": 0.5477608442306519, "learning_rate": 1.3926e-05, "loss": 0.0965, "step": 4645 }, { "epoch": 26.101408450704227, "grad_norm": 0.5844956040382385, "learning_rate": 1.3929e-05, "loss": 0.0459, "step": 4646 }, { "epoch": 26.107042253521126, "grad_norm": 0.6256443858146667, "learning_rate": 1.3932e-05, "loss": 0.098, "step": 4647 }, { "epoch": 26.112676056338028, "grad_norm": 0.6033890247344971, "learning_rate": 1.3935000000000001e-05, "loss": 0.0544, "step": 
4648 }, { "epoch": 26.11830985915493, "grad_norm": 0.48001837730407715, "learning_rate": 1.3938000000000001e-05, "loss": 0.0473, "step": 4649 }, { "epoch": 26.123943661971833, "grad_norm": 0.6203490495681763, "learning_rate": 1.3941000000000001e-05, "loss": 0.0605, "step": 4650 }, { "epoch": 26.12957746478873, "grad_norm": 0.757520854473114, "learning_rate": 1.3944e-05, "loss": 0.0658, "step": 4651 }, { "epoch": 26.135211267605634, "grad_norm": 0.5577385425567627, "learning_rate": 1.3947e-05, "loss": 0.0912, "step": 4652 }, { "epoch": 26.140845070422536, "grad_norm": 0.5306966304779053, "learning_rate": 1.395e-05, "loss": 0.0426, "step": 4653 }, { "epoch": 26.146478873239438, "grad_norm": 1.2447303533554077, "learning_rate": 1.3953e-05, "loss": 0.038, "step": 4654 }, { "epoch": 26.152112676056337, "grad_norm": 0.4432101845741272, "learning_rate": 1.3956e-05, "loss": 0.0274, "step": 4655 }, { "epoch": 26.15774647887324, "grad_norm": 0.6046532988548279, "learning_rate": 1.3959e-05, "loss": 0.0497, "step": 4656 }, { "epoch": 26.16338028169014, "grad_norm": 0.7679754495620728, "learning_rate": 1.3962e-05, "loss": 0.0516, "step": 4657 }, { "epoch": 26.169014084507044, "grad_norm": 0.44065138697624207, "learning_rate": 1.3965000000000001e-05, "loss": 0.0549, "step": 4658 }, { "epoch": 26.174647887323943, "grad_norm": 0.5914302468299866, "learning_rate": 1.3968e-05, "loss": 0.0551, "step": 4659 }, { "epoch": 26.180281690140845, "grad_norm": 0.7548152208328247, "learning_rate": 1.3971e-05, "loss": 0.0908, "step": 4660 }, { "epoch": 26.185915492957747, "grad_norm": 0.9631087779998779, "learning_rate": 1.3974e-05, "loss": 0.0482, "step": 4661 }, { "epoch": 26.19154929577465, "grad_norm": 0.6280474066734314, "learning_rate": 1.3977e-05, "loss": 0.0429, "step": 4662 }, { "epoch": 26.197183098591548, "grad_norm": 0.3833680748939514, "learning_rate": 1.3980000000000002e-05, "loss": 0.0393, "step": 4663 }, { "epoch": 26.20281690140845, "grad_norm": 0.4500342607498169, 
"learning_rate": 1.3983000000000001e-05, "loss": 0.0333, "step": 4664 }, { "epoch": 26.208450704225353, "grad_norm": 1.3899935483932495, "learning_rate": 1.3986000000000001e-05, "loss": 0.074, "step": 4665 }, { "epoch": 26.214084507042255, "grad_norm": 0.6622061133384705, "learning_rate": 1.3989e-05, "loss": 0.0464, "step": 4666 }, { "epoch": 26.219718309859154, "grad_norm": 0.5292353630065918, "learning_rate": 1.3992e-05, "loss": 0.0503, "step": 4667 }, { "epoch": 26.225352112676056, "grad_norm": 0.47329628467559814, "learning_rate": 1.3995e-05, "loss": 0.0481, "step": 4668 }, { "epoch": 26.23098591549296, "grad_norm": 0.5561329126358032, "learning_rate": 1.3998e-05, "loss": 0.0282, "step": 4669 }, { "epoch": 26.23661971830986, "grad_norm": 0.5321069955825806, "learning_rate": 1.4001e-05, "loss": 0.0479, "step": 4670 }, { "epoch": 26.24225352112676, "grad_norm": 0.4874018728733063, "learning_rate": 1.4004e-05, "loss": 0.074, "step": 4671 }, { "epoch": 26.24788732394366, "grad_norm": 0.7873947024345398, "learning_rate": 1.4007e-05, "loss": 0.0393, "step": 4672 }, { "epoch": 26.253521126760564, "grad_norm": 1.2015842199325562, "learning_rate": 1.4010000000000001e-05, "loss": 0.3038, "step": 4673 }, { "epoch": 26.259154929577466, "grad_norm": 0.714046835899353, "learning_rate": 1.4013000000000001e-05, "loss": 0.1061, "step": 4674 }, { "epoch": 26.264788732394365, "grad_norm": 0.8418575525283813, "learning_rate": 1.4016000000000001e-05, "loss": 0.1529, "step": 4675 }, { "epoch": 26.270422535211267, "grad_norm": 0.7827997803688049, "learning_rate": 1.4019e-05, "loss": 0.176, "step": 4676 }, { "epoch": 26.27605633802817, "grad_norm": 0.6718109846115112, "learning_rate": 1.4022e-05, "loss": 0.1008, "step": 4677 }, { "epoch": 26.281690140845072, "grad_norm": 0.6738607883453369, "learning_rate": 1.4025e-05, "loss": 0.1282, "step": 4678 }, { "epoch": 26.28732394366197, "grad_norm": 0.7049904465675354, "learning_rate": 1.4028e-05, "loss": 0.1352, "step": 4679 }, { "epoch": 
26.292957746478873, "grad_norm": 0.8437009453773499, "learning_rate": 1.4031e-05, "loss": 0.1339, "step": 4680 }, { "epoch": 26.298591549295775, "grad_norm": 0.748948335647583, "learning_rate": 1.4034e-05, "loss": 0.0998, "step": 4681 }, { "epoch": 26.304225352112677, "grad_norm": 0.7049367427825928, "learning_rate": 1.4037e-05, "loss": 0.1604, "step": 4682 }, { "epoch": 26.309859154929576, "grad_norm": 0.7537553310394287, "learning_rate": 1.4040000000000001e-05, "loss": 0.0849, "step": 4683 }, { "epoch": 26.31549295774648, "grad_norm": 3.3509132862091064, "learning_rate": 1.4043000000000001e-05, "loss": 0.0987, "step": 4684 }, { "epoch": 26.32112676056338, "grad_norm": 0.5683264136314392, "learning_rate": 1.4046e-05, "loss": 0.0813, "step": 4685 }, { "epoch": 26.326760563380283, "grad_norm": 0.6593437790870667, "learning_rate": 1.4049e-05, "loss": 0.0851, "step": 4686 }, { "epoch": 26.33239436619718, "grad_norm": 0.821178138256073, "learning_rate": 1.4052e-05, "loss": 0.1112, "step": 4687 }, { "epoch": 26.338028169014084, "grad_norm": 0.9072875380516052, "learning_rate": 1.4055000000000002e-05, "loss": 0.0964, "step": 4688 }, { "epoch": 26.343661971830986, "grad_norm": 0.5579844117164612, "learning_rate": 1.4058000000000002e-05, "loss": 0.073, "step": 4689 }, { "epoch": 26.34929577464789, "grad_norm": 0.5828062295913696, "learning_rate": 1.4061e-05, "loss": 0.0984, "step": 4690 }, { "epoch": 26.354929577464787, "grad_norm": 0.5473426580429077, "learning_rate": 1.4064e-05, "loss": 0.0639, "step": 4691 }, { "epoch": 26.36056338028169, "grad_norm": 0.6506588459014893, "learning_rate": 1.4067e-05, "loss": 0.0734, "step": 4692 }, { "epoch": 26.366197183098592, "grad_norm": 1.1171239614486694, "learning_rate": 1.4069999999999999e-05, "loss": 0.0763, "step": 4693 }, { "epoch": 26.371830985915494, "grad_norm": 0.6510008573532104, "learning_rate": 1.4073e-05, "loss": 0.0794, "step": 4694 }, { "epoch": 26.377464788732393, "grad_norm": 0.5402976870536804, "learning_rate": 
1.4076e-05, "loss": 0.0935, "step": 4695 }, { "epoch": 26.383098591549295, "grad_norm": 0.6134204268455505, "learning_rate": 1.4079e-05, "loss": 0.0599, "step": 4696 }, { "epoch": 26.388732394366198, "grad_norm": 0.6898455023765564, "learning_rate": 1.4082e-05, "loss": 0.0691, "step": 4697 }, { "epoch": 26.3943661971831, "grad_norm": 0.4620243012905121, "learning_rate": 1.4085e-05, "loss": 0.0581, "step": 4698 }, { "epoch": 26.4, "grad_norm": 1.4602699279785156, "learning_rate": 1.4088000000000001e-05, "loss": 0.0574, "step": 4699 }, { "epoch": 26.4056338028169, "grad_norm": 0.4989027678966522, "learning_rate": 1.4091000000000001e-05, "loss": 0.0429, "step": 4700 }, { "epoch": 26.411267605633803, "grad_norm": 0.5243587493896484, "learning_rate": 1.4094000000000001e-05, "loss": 0.0381, "step": 4701 }, { "epoch": 26.416901408450705, "grad_norm": 0.8976671099662781, "learning_rate": 1.4097e-05, "loss": 0.0719, "step": 4702 }, { "epoch": 26.422535211267604, "grad_norm": 0.6932439208030701, "learning_rate": 1.4099999999999999e-05, "loss": 0.0679, "step": 4703 }, { "epoch": 26.428169014084506, "grad_norm": 0.5292612314224243, "learning_rate": 1.4103e-05, "loss": 0.031, "step": 4704 }, { "epoch": 26.43380281690141, "grad_norm": 1.179229497909546, "learning_rate": 1.4106e-05, "loss": 0.0882, "step": 4705 }, { "epoch": 26.43943661971831, "grad_norm": 0.5527491569519043, "learning_rate": 1.4109e-05, "loss": 0.0295, "step": 4706 }, { "epoch": 26.44507042253521, "grad_norm": 0.5614479780197144, "learning_rate": 1.4112e-05, "loss": 0.0319, "step": 4707 }, { "epoch": 26.450704225352112, "grad_norm": 0.6183871626853943, "learning_rate": 1.4115e-05, "loss": 0.0447, "step": 4708 }, { "epoch": 26.456338028169014, "grad_norm": 0.525558590888977, "learning_rate": 1.4118000000000001e-05, "loss": 0.0447, "step": 4709 }, { "epoch": 26.461971830985917, "grad_norm": 0.4371645152568817, "learning_rate": 1.4121e-05, "loss": 0.0292, "step": 4710 }, { "epoch": 26.467605633802815, "grad_norm": 
0.899486243724823, "learning_rate": 1.4124e-05, "loss": 0.0288, "step": 4711 }, { "epoch": 26.473239436619718, "grad_norm": 0.8152267336845398, "learning_rate": 1.4127e-05, "loss": 0.0351, "step": 4712 }, { "epoch": 26.47887323943662, "grad_norm": 0.7296839952468872, "learning_rate": 1.413e-05, "loss": 0.023, "step": 4713 }, { "epoch": 26.484507042253522, "grad_norm": 0.5452113747596741, "learning_rate": 1.4133000000000002e-05, "loss": 0.0311, "step": 4714 }, { "epoch": 26.49014084507042, "grad_norm": 0.5012322664260864, "learning_rate": 1.4136e-05, "loss": 0.0709, "step": 4715 }, { "epoch": 26.495774647887323, "grad_norm": 1.423704981803894, "learning_rate": 1.4139e-05, "loss": 0.0533, "step": 4716 }, { "epoch": 26.501408450704226, "grad_norm": 0.7372785210609436, "learning_rate": 1.4142e-05, "loss": 0.1578, "step": 4717 }, { "epoch": 26.507042253521128, "grad_norm": 0.6098973751068115, "learning_rate": 1.4145e-05, "loss": 0.153, "step": 4718 }, { "epoch": 26.512676056338027, "grad_norm": 0.6529301404953003, "learning_rate": 1.4148e-05, "loss": 0.1645, "step": 4719 }, { "epoch": 26.51830985915493, "grad_norm": 0.6626883149147034, "learning_rate": 1.4151e-05, "loss": 0.1767, "step": 4720 }, { "epoch": 26.52394366197183, "grad_norm": 0.8549914956092834, "learning_rate": 1.4154e-05, "loss": 0.1691, "step": 4721 }, { "epoch": 26.529577464788733, "grad_norm": 0.8288560509681702, "learning_rate": 1.4157e-05, "loss": 0.1343, "step": 4722 }, { "epoch": 26.535211267605632, "grad_norm": 0.6477600336074829, "learning_rate": 1.416e-05, "loss": 0.1554, "step": 4723 }, { "epoch": 26.540845070422534, "grad_norm": 0.5830531120300293, "learning_rate": 1.4163000000000001e-05, "loss": 0.098, "step": 4724 }, { "epoch": 26.546478873239437, "grad_norm": 0.6624829769134521, "learning_rate": 1.4166000000000001e-05, "loss": 0.1441, "step": 4725 }, { "epoch": 26.55211267605634, "grad_norm": 0.554343581199646, "learning_rate": 1.4169000000000001e-05, "loss": 0.0619, "step": 4726 }, { 
"epoch": 26.557746478873238, "grad_norm": 0.6731516718864441, "learning_rate": 1.4172e-05, "loss": 0.0931, "step": 4727 }, { "epoch": 26.56338028169014, "grad_norm": 0.535407304763794, "learning_rate": 1.4174999999999999e-05, "loss": 0.082, "step": 4728 }, { "epoch": 26.569014084507042, "grad_norm": 0.5767724514007568, "learning_rate": 1.4178e-05, "loss": 0.1091, "step": 4729 }, { "epoch": 26.574647887323945, "grad_norm": 0.6386505961418152, "learning_rate": 1.4181e-05, "loss": 0.0721, "step": 4730 }, { "epoch": 26.580281690140843, "grad_norm": 0.6831379532814026, "learning_rate": 1.4184e-05, "loss": 0.0773, "step": 4731 }, { "epoch": 26.585915492957746, "grad_norm": 0.5299614071846008, "learning_rate": 1.4187e-05, "loss": 0.0699, "step": 4732 }, { "epoch": 26.591549295774648, "grad_norm": 0.6666359305381775, "learning_rate": 1.419e-05, "loss": 0.068, "step": 4733 }, { "epoch": 26.59718309859155, "grad_norm": 0.8679301142692566, "learning_rate": 1.4193000000000001e-05, "loss": 0.0581, "step": 4734 }, { "epoch": 26.60281690140845, "grad_norm": 0.7485905289649963, "learning_rate": 1.4196000000000001e-05, "loss": 0.0689, "step": 4735 }, { "epoch": 26.60845070422535, "grad_norm": 0.6004778742790222, "learning_rate": 1.4199e-05, "loss": 0.0616, "step": 4736 }, { "epoch": 26.614084507042254, "grad_norm": 0.7881636619567871, "learning_rate": 1.4202e-05, "loss": 0.0793, "step": 4737 }, { "epoch": 26.619718309859156, "grad_norm": 0.6414483785629272, "learning_rate": 1.4205e-05, "loss": 0.0752, "step": 4738 }, { "epoch": 26.625352112676055, "grad_norm": 0.5661441087722778, "learning_rate": 1.4208e-05, "loss": 0.0406, "step": 4739 }, { "epoch": 26.630985915492957, "grad_norm": 0.47907671332359314, "learning_rate": 1.4211e-05, "loss": 0.03, "step": 4740 }, { "epoch": 26.63661971830986, "grad_norm": 1.1401803493499756, "learning_rate": 1.4214e-05, "loss": 0.0444, "step": 4741 }, { "epoch": 26.64225352112676, "grad_norm": 0.5903263092041016, "learning_rate": 1.4217e-05, "loss": 
0.0968, "step": 4742 }, { "epoch": 26.647887323943664, "grad_norm": 0.6022658348083496, "learning_rate": 1.422e-05, "loss": 0.0424, "step": 4743 }, { "epoch": 26.653521126760563, "grad_norm": 0.48015111684799194, "learning_rate": 1.4223000000000001e-05, "loss": 0.0321, "step": 4744 }, { "epoch": 26.659154929577465, "grad_norm": 1.299860954284668, "learning_rate": 1.4226e-05, "loss": 0.0637, "step": 4745 }, { "epoch": 26.664788732394367, "grad_norm": 0.4438721835613251, "learning_rate": 1.4229e-05, "loss": 0.0291, "step": 4746 }, { "epoch": 26.670422535211266, "grad_norm": 0.7025146484375, "learning_rate": 1.4232e-05, "loss": 0.0426, "step": 4747 }, { "epoch": 26.676056338028168, "grad_norm": 0.5061723589897156, "learning_rate": 1.4235e-05, "loss": 0.0914, "step": 4748 }, { "epoch": 26.68169014084507, "grad_norm": 0.8653940558433533, "learning_rate": 1.4238000000000002e-05, "loss": 0.1125, "step": 4749 }, { "epoch": 26.687323943661973, "grad_norm": 0.6110497713088989, "learning_rate": 1.4241000000000001e-05, "loss": 0.0495, "step": 4750 }, { "epoch": 26.692957746478875, "grad_norm": 0.5815498232841492, "learning_rate": 1.4244000000000001e-05, "loss": 0.0803, "step": 4751 }, { "epoch": 26.698591549295774, "grad_norm": 0.6211065649986267, "learning_rate": 1.4247e-05, "loss": 0.0378, "step": 4752 }, { "epoch": 26.704225352112676, "grad_norm": 0.5804537534713745, "learning_rate": 1.4249999999999999e-05, "loss": 0.0793, "step": 4753 }, { "epoch": 26.70985915492958, "grad_norm": 0.6733036041259766, "learning_rate": 1.4253e-05, "loss": 0.0348, "step": 4754 }, { "epoch": 26.71549295774648, "grad_norm": 0.6640349626541138, "learning_rate": 1.4256e-05, "loss": 0.0406, "step": 4755 }, { "epoch": 26.72112676056338, "grad_norm": 0.4815010130405426, "learning_rate": 1.4259e-05, "loss": 0.0562, "step": 4756 }, { "epoch": 26.72676056338028, "grad_norm": 0.8611742854118347, "learning_rate": 1.4262e-05, "loss": 0.0328, "step": 4757 }, { "epoch": 26.732394366197184, "grad_norm": 
0.5563916563987732, "learning_rate": 1.4265e-05, "loss": 0.0436, "step": 4758 }, { "epoch": 26.738028169014086, "grad_norm": 0.4007475674152374, "learning_rate": 1.4268000000000001e-05, "loss": 0.0189, "step": 4759 }, { "epoch": 26.743661971830985, "grad_norm": 0.6197370886802673, "learning_rate": 1.4271000000000001e-05, "loss": 0.0329, "step": 4760 }, { "epoch": 26.749295774647887, "grad_norm": 0.6861037611961365, "learning_rate": 1.4274000000000001e-05, "loss": 0.1468, "step": 4761 }, { "epoch": 26.75492957746479, "grad_norm": 0.6346372365951538, "learning_rate": 1.4277e-05, "loss": 0.1389, "step": 4762 }, { "epoch": 26.760563380281692, "grad_norm": 0.5934087038040161, "learning_rate": 1.428e-05, "loss": 0.1281, "step": 4763 }, { "epoch": 26.76619718309859, "grad_norm": 0.613767683506012, "learning_rate": 1.4283e-05, "loss": 0.1044, "step": 4764 }, { "epoch": 26.771830985915493, "grad_norm": 0.7058071494102478, "learning_rate": 1.4286e-05, "loss": 0.1218, "step": 4765 }, { "epoch": 26.777464788732395, "grad_norm": 0.8268466591835022, "learning_rate": 1.4289e-05, "loss": 0.1124, "step": 4766 }, { "epoch": 26.783098591549297, "grad_norm": 0.7006628513336182, "learning_rate": 1.4292e-05, "loss": 0.1087, "step": 4767 }, { "epoch": 26.788732394366196, "grad_norm": 0.6487562656402588, "learning_rate": 1.4295e-05, "loss": 0.0905, "step": 4768 }, { "epoch": 26.7943661971831, "grad_norm": 1.0965838432312012, "learning_rate": 1.4298000000000001e-05, "loss": 0.1713, "step": 4769 }, { "epoch": 26.8, "grad_norm": 0.6667519807815552, "learning_rate": 1.4301e-05, "loss": 0.0913, "step": 4770 }, { "epoch": 26.805633802816903, "grad_norm": 0.5433948636054993, "learning_rate": 1.4304e-05, "loss": 0.0839, "step": 4771 }, { "epoch": 26.8112676056338, "grad_norm": 0.6707758903503418, "learning_rate": 1.4307e-05, "loss": 0.1079, "step": 4772 }, { "epoch": 26.816901408450704, "grad_norm": 0.6320624947547913, "learning_rate": 1.431e-05, "loss": 0.0817, "step": 4773 }, { "epoch": 
26.822535211267606, "grad_norm": 0.675203263759613, "learning_rate": 1.4313000000000002e-05, "loss": 0.1213, "step": 4774 }, { "epoch": 26.82816901408451, "grad_norm": 0.5426639914512634, "learning_rate": 1.4316000000000002e-05, "loss": 0.1098, "step": 4775 }, { "epoch": 26.833802816901407, "grad_norm": 0.5826584100723267, "learning_rate": 1.4319e-05, "loss": 0.1124, "step": 4776 }, { "epoch": 26.83943661971831, "grad_norm": 0.4697006642818451, "learning_rate": 1.4322e-05, "loss": 0.0579, "step": 4777 }, { "epoch": 26.845070422535212, "grad_norm": 0.7213027477264404, "learning_rate": 1.4325e-05, "loss": 0.0705, "step": 4778 }, { "epoch": 26.850704225352114, "grad_norm": 0.7102864980697632, "learning_rate": 1.4328e-05, "loss": 0.0636, "step": 4779 }, { "epoch": 26.856338028169013, "grad_norm": 0.612140953540802, "learning_rate": 1.4331e-05, "loss": 0.0503, "step": 4780 }, { "epoch": 26.861971830985915, "grad_norm": 0.8942006826400757, "learning_rate": 1.4334e-05, "loss": 0.0886, "step": 4781 }, { "epoch": 26.867605633802818, "grad_norm": 0.6737601161003113, "learning_rate": 1.4337e-05, "loss": 0.0703, "step": 4782 }, { "epoch": 26.87323943661972, "grad_norm": 0.6577408313751221, "learning_rate": 1.434e-05, "loss": 0.0508, "step": 4783 }, { "epoch": 26.87887323943662, "grad_norm": 0.5907220244407654, "learning_rate": 1.4343000000000001e-05, "loss": 0.0479, "step": 4784 }, { "epoch": 26.88450704225352, "grad_norm": 0.5073230266571045, "learning_rate": 1.4346000000000001e-05, "loss": 0.07, "step": 4785 }, { "epoch": 26.890140845070423, "grad_norm": 0.5227101445198059, "learning_rate": 1.4349000000000001e-05, "loss": 0.0807, "step": 4786 }, { "epoch": 26.895774647887325, "grad_norm": 0.5871232151985168, "learning_rate": 1.4352e-05, "loss": 0.0615, "step": 4787 }, { "epoch": 26.901408450704224, "grad_norm": 0.545843243598938, "learning_rate": 1.4355e-05, "loss": 0.0546, "step": 4788 }, { "epoch": 26.907042253521126, "grad_norm": 0.4214637577533722, "learning_rate": 
1.4358e-05, "loss": 0.0329, "step": 4789 }, { "epoch": 26.91267605633803, "grad_norm": 0.8457795977592468, "learning_rate": 1.4361e-05, "loss": 0.0741, "step": 4790 }, { "epoch": 26.91830985915493, "grad_norm": 0.4776197671890259, "learning_rate": 1.4364e-05, "loss": 0.0392, "step": 4791 }, { "epoch": 26.92394366197183, "grad_norm": 0.5755966901779175, "learning_rate": 1.4367e-05, "loss": 0.0374, "step": 4792 }, { "epoch": 26.929577464788732, "grad_norm": 0.6158512234687805, "learning_rate": 1.437e-05, "loss": 0.042, "step": 4793 }, { "epoch": 26.935211267605634, "grad_norm": 0.6371371746063232, "learning_rate": 1.4373000000000001e-05, "loss": 0.0679, "step": 4794 }, { "epoch": 26.940845070422537, "grad_norm": 0.5314925312995911, "learning_rate": 1.4376000000000001e-05, "loss": 0.0453, "step": 4795 }, { "epoch": 26.946478873239435, "grad_norm": 0.5045316815376282, "learning_rate": 1.4379e-05, "loss": 0.0452, "step": 4796 }, { "epoch": 26.952112676056338, "grad_norm": 0.5479900240898132, "learning_rate": 1.4382e-05, "loss": 0.0466, "step": 4797 }, { "epoch": 26.95774647887324, "grad_norm": 0.6544597744941711, "learning_rate": 1.4385e-05, "loss": 0.0461, "step": 4798 }, { "epoch": 26.963380281690142, "grad_norm": 0.6886507272720337, "learning_rate": 1.4388000000000002e-05, "loss": 0.0418, "step": 4799 }, { "epoch": 26.96901408450704, "grad_norm": 0.9063920974731445, "learning_rate": 1.4391000000000002e-05, "loss": 0.1275, "step": 4800 }, { "epoch": 26.974647887323943, "grad_norm": 0.4458438456058502, "learning_rate": 1.4394e-05, "loss": 0.0181, "step": 4801 }, { "epoch": 26.980281690140846, "grad_norm": 0.5693234801292419, "learning_rate": 1.4397e-05, "loss": 0.0573, "step": 4802 }, { "epoch": 26.985915492957748, "grad_norm": 0.45846477150917053, "learning_rate": 1.44e-05, "loss": 0.0344, "step": 4803 }, { "epoch": 26.991549295774647, "grad_norm": 0.37494173645973206, "learning_rate": 1.4403e-05, "loss": 0.0276, "step": 4804 }, { "epoch": 26.99718309859155, 
"grad_norm": 0.673680305480957, "learning_rate": 1.4406e-05, "loss": 0.0783, "step": 4805 }, { "epoch": 27.0, "grad_norm": 0.857265293598175, "learning_rate": 1.4409e-05, "loss": 0.0193, "step": 4806 }, { "epoch": 27.005633802816902, "grad_norm": 0.6166837215423584, "learning_rate": 1.4412e-05, "loss": 0.1258, "step": 4807 }, { "epoch": 27.011267605633805, "grad_norm": 0.6641323566436768, "learning_rate": 1.4415e-05, "loss": 0.1689, "step": 4808 }, { "epoch": 27.016901408450703, "grad_norm": 0.5771862864494324, "learning_rate": 1.4418000000000002e-05, "loss": 0.1547, "step": 4809 }, { "epoch": 27.022535211267606, "grad_norm": 0.5502634048461914, "learning_rate": 1.4421000000000001e-05, "loss": 0.1511, "step": 4810 }, { "epoch": 27.028169014084508, "grad_norm": 0.6706319451332092, "learning_rate": 1.4424000000000001e-05, "loss": 0.1087, "step": 4811 }, { "epoch": 27.03380281690141, "grad_norm": 0.6062109470367432, "learning_rate": 1.4427000000000001e-05, "loss": 0.1205, "step": 4812 }, { "epoch": 27.03943661971831, "grad_norm": 0.5267991423606873, "learning_rate": 1.4429999999999999e-05, "loss": 0.1214, "step": 4813 }, { "epoch": 27.04507042253521, "grad_norm": 0.6835455894470215, "learning_rate": 1.4433e-05, "loss": 0.1396, "step": 4814 }, { "epoch": 27.050704225352113, "grad_norm": 0.9987457990646362, "learning_rate": 1.4436e-05, "loss": 0.104, "step": 4815 }, { "epoch": 27.056338028169016, "grad_norm": 0.6764596700668335, "learning_rate": 1.4439e-05, "loss": 0.093, "step": 4816 }, { "epoch": 27.061971830985915, "grad_norm": 0.5059166550636292, "learning_rate": 1.4442e-05, "loss": 0.07, "step": 4817 }, { "epoch": 27.067605633802817, "grad_norm": 0.5995231866836548, "learning_rate": 1.4445e-05, "loss": 0.0507, "step": 4818 }, { "epoch": 27.07323943661972, "grad_norm": 0.5987516045570374, "learning_rate": 1.4448e-05, "loss": 0.0814, "step": 4819 }, { "epoch": 27.07887323943662, "grad_norm": 0.4857785403728485, "learning_rate": 1.4451000000000001e-05, "loss": 0.068, 
"step": 4820 }, { "epoch": 27.08450704225352, "grad_norm": 0.5674054622650146, "learning_rate": 1.4454000000000001e-05, "loss": 0.1156, "step": 4821 }, { "epoch": 27.090140845070422, "grad_norm": 0.6343984603881836, "learning_rate": 1.4457e-05, "loss": 0.083, "step": 4822 }, { "epoch": 27.095774647887325, "grad_norm": 0.47462549805641174, "learning_rate": 1.446e-05, "loss": 0.0524, "step": 4823 }, { "epoch": 27.101408450704227, "grad_norm": 0.6579306125640869, "learning_rate": 1.4463e-05, "loss": 0.0593, "step": 4824 }, { "epoch": 27.107042253521126, "grad_norm": 0.4165186882019043, "learning_rate": 1.4466e-05, "loss": 0.0364, "step": 4825 }, { "epoch": 27.112676056338028, "grad_norm": 0.6687729954719543, "learning_rate": 1.4469e-05, "loss": 0.035, "step": 4826 }, { "epoch": 27.11830985915493, "grad_norm": 0.6445411443710327, "learning_rate": 1.4472e-05, "loss": 0.0979, "step": 4827 }, { "epoch": 27.123943661971833, "grad_norm": 0.46075326204299927, "learning_rate": 1.4475e-05, "loss": 0.0552, "step": 4828 }, { "epoch": 27.12957746478873, "grad_norm": 0.6398996710777283, "learning_rate": 1.4478e-05, "loss": 0.0864, "step": 4829 }, { "epoch": 27.135211267605634, "grad_norm": 0.37369802594184875, "learning_rate": 1.4481e-05, "loss": 0.034, "step": 4830 }, { "epoch": 27.140845070422536, "grad_norm": 0.581660807132721, "learning_rate": 1.4484e-05, "loss": 0.1014, "step": 4831 }, { "epoch": 27.146478873239438, "grad_norm": 0.5578526258468628, "learning_rate": 1.4487e-05, "loss": 0.0548, "step": 4832 }, { "epoch": 27.152112676056337, "grad_norm": 0.4809640347957611, "learning_rate": 1.449e-05, "loss": 0.0478, "step": 4833 }, { "epoch": 27.15774647887324, "grad_norm": 0.44849711656570435, "learning_rate": 1.4493e-05, "loss": 0.0306, "step": 4834 }, { "epoch": 27.16338028169014, "grad_norm": 0.7638527154922485, "learning_rate": 1.4496000000000001e-05, "loss": 0.0452, "step": 4835 }, { "epoch": 27.169014084507044, "grad_norm": 0.6991044282913208, "learning_rate": 
1.4499000000000001e-05, "loss": 0.063, "step": 4836 }, { "epoch": 27.174647887323943, "grad_norm": 0.5348283648490906, "learning_rate": 1.4502000000000001e-05, "loss": 0.0317, "step": 4837 }, { "epoch": 27.180281690140845, "grad_norm": 0.5818392634391785, "learning_rate": 1.4505e-05, "loss": 0.0424, "step": 4838 }, { "epoch": 27.185915492957747, "grad_norm": 0.536068320274353, "learning_rate": 1.4507999999999999e-05, "loss": 0.0414, "step": 4839 }, { "epoch": 27.19154929577465, "grad_norm": 0.6229778528213501, "learning_rate": 1.4511e-05, "loss": 0.0537, "step": 4840 }, { "epoch": 27.197183098591548, "grad_norm": 0.9085911512374878, "learning_rate": 1.4514e-05, "loss": 0.0833, "step": 4841 }, { "epoch": 27.20281690140845, "grad_norm": 0.7931708693504333, "learning_rate": 1.4517e-05, "loss": 0.0697, "step": 4842 }, { "epoch": 27.208450704225353, "grad_norm": 0.4710341691970825, "learning_rate": 1.452e-05, "loss": 0.0533, "step": 4843 }, { "epoch": 27.214084507042255, "grad_norm": 0.8240888714790344, "learning_rate": 1.4523e-05, "loss": 0.0206, "step": 4844 }, { "epoch": 27.219718309859154, "grad_norm": 0.5967980027198792, "learning_rate": 1.4526000000000001e-05, "loss": 0.052, "step": 4845 }, { "epoch": 27.225352112676056, "grad_norm": 0.8566497564315796, "learning_rate": 1.4529000000000001e-05, "loss": 0.0556, "step": 4846 }, { "epoch": 27.23098591549296, "grad_norm": 0.5402615070343018, "learning_rate": 1.4532e-05, "loss": 0.0246, "step": 4847 }, { "epoch": 27.23661971830986, "grad_norm": 0.46531057357788086, "learning_rate": 1.4535e-05, "loss": 0.0699, "step": 4848 }, { "epoch": 27.24225352112676, "grad_norm": 0.46954432129859924, "learning_rate": 1.4538e-05, "loss": 0.0479, "step": 4849 }, { "epoch": 27.24788732394366, "grad_norm": 0.5690462589263916, "learning_rate": 1.4541e-05, "loss": 0.0464, "step": 4850 }, { "epoch": 27.253521126760564, "grad_norm": 0.8503040075302124, "learning_rate": 1.4544e-05, "loss": 0.1717, "step": 4851 }, { "epoch": 
27.259154929577466, "grad_norm": 0.5776522755622864, "learning_rate": 1.4547e-05, "loss": 0.1086, "step": 4852 }, { "epoch": 27.264788732394365, "grad_norm": 0.6937713027000427, "learning_rate": 1.455e-05, "loss": 0.1219, "step": 4853 }, { "epoch": 27.270422535211267, "grad_norm": 1.107546329498291, "learning_rate": 1.4553e-05, "loss": 0.1458, "step": 4854 }, { "epoch": 27.27605633802817, "grad_norm": 0.620006799697876, "learning_rate": 1.4556000000000001e-05, "loss": 0.0843, "step": 4855 }, { "epoch": 27.281690140845072, "grad_norm": 0.6403486728668213, "learning_rate": 1.4559e-05, "loss": 0.1247, "step": 4856 }, { "epoch": 27.28732394366197, "grad_norm": 0.9365518689155579, "learning_rate": 1.4562e-05, "loss": 0.1353, "step": 4857 }, { "epoch": 27.292957746478873, "grad_norm": 0.8805404901504517, "learning_rate": 1.4565e-05, "loss": 0.1181, "step": 4858 }, { "epoch": 27.298591549295775, "grad_norm": 0.6936009526252747, "learning_rate": 1.4568e-05, "loss": 0.0798, "step": 4859 }, { "epoch": 27.304225352112677, "grad_norm": 0.6158699989318848, "learning_rate": 1.4571000000000002e-05, "loss": 0.1085, "step": 4860 }, { "epoch": 27.309859154929576, "grad_norm": 0.6138626337051392, "learning_rate": 1.4574000000000001e-05, "loss": 0.1149, "step": 4861 }, { "epoch": 27.31549295774648, "grad_norm": 0.6710749268531799, "learning_rate": 1.4577e-05, "loss": 0.0602, "step": 4862 }, { "epoch": 27.32112676056338, "grad_norm": 0.591975748538971, "learning_rate": 1.458e-05, "loss": 0.1104, "step": 4863 }, { "epoch": 27.326760563380283, "grad_norm": 0.5016064047813416, "learning_rate": 1.4582999999999999e-05, "loss": 0.0691, "step": 4864 }, { "epoch": 27.33239436619718, "grad_norm": 0.6836541295051575, "learning_rate": 1.4586e-05, "loss": 0.077, "step": 4865 }, { "epoch": 27.338028169014084, "grad_norm": 0.5612192153930664, "learning_rate": 1.4589e-05, "loss": 0.0641, "step": 4866 }, { "epoch": 27.343661971830986, "grad_norm": 0.553382158279419, "learning_rate": 1.4592e-05, 
"loss": 0.0688, "step": 4867 }, { "epoch": 27.34929577464789, "grad_norm": 0.6770462989807129, "learning_rate": 1.4595e-05, "loss": 0.1096, "step": 4868 }, { "epoch": 27.354929577464787, "grad_norm": 0.6650416851043701, "learning_rate": 1.4598e-05, "loss": 0.0447, "step": 4869 }, { "epoch": 27.36056338028169, "grad_norm": 0.8601246476173401, "learning_rate": 1.4601000000000001e-05, "loss": 0.0778, "step": 4870 }, { "epoch": 27.366197183098592, "grad_norm": 0.6055887937545776, "learning_rate": 1.4604000000000001e-05, "loss": 0.0406, "step": 4871 }, { "epoch": 27.371830985915494, "grad_norm": 0.6669006943702698, "learning_rate": 1.4607000000000001e-05, "loss": 0.0524, "step": 4872 }, { "epoch": 27.377464788732393, "grad_norm": 0.6520717740058899, "learning_rate": 1.461e-05, "loss": 0.0788, "step": 4873 }, { "epoch": 27.383098591549295, "grad_norm": 0.5716097950935364, "learning_rate": 1.4613e-05, "loss": 0.04, "step": 4874 }, { "epoch": 27.388732394366198, "grad_norm": 0.5934821963310242, "learning_rate": 1.4616e-05, "loss": 0.042, "step": 4875 }, { "epoch": 27.3943661971831, "grad_norm": 0.5447319149971008, "learning_rate": 1.4619e-05, "loss": 0.0368, "step": 4876 }, { "epoch": 27.4, "grad_norm": 0.6263892650604248, "learning_rate": 1.4622e-05, "loss": 0.0408, "step": 4877 }, { "epoch": 27.4056338028169, "grad_norm": 1.2496488094329834, "learning_rate": 1.4625e-05, "loss": 0.0478, "step": 4878 }, { "epoch": 27.411267605633803, "grad_norm": 0.4757492244243622, "learning_rate": 1.4628e-05, "loss": 0.0351, "step": 4879 }, { "epoch": 27.416901408450705, "grad_norm": 0.5115252137184143, "learning_rate": 1.4631000000000001e-05, "loss": 0.0334, "step": 4880 }, { "epoch": 27.422535211267604, "grad_norm": 0.6796021461486816, "learning_rate": 1.4634e-05, "loss": 0.0717, "step": 4881 }, { "epoch": 27.428169014084506, "grad_norm": 0.3681405484676361, "learning_rate": 1.4637e-05, "loss": 0.0264, "step": 4882 }, { "epoch": 27.43380281690141, "grad_norm": 0.9370042085647583, 
"learning_rate": 1.464e-05, "loss": 0.1241, "step": 4883 }, { "epoch": 27.43943661971831, "grad_norm": 0.5246590375900269, "learning_rate": 1.4643e-05, "loss": 0.0353, "step": 4884 }, { "epoch": 27.44507042253521, "grad_norm": 0.6782833337783813, "learning_rate": 1.4646000000000002e-05, "loss": 0.0526, "step": 4885 }, { "epoch": 27.450704225352112, "grad_norm": 0.5108622312545776, "learning_rate": 1.4649000000000002e-05, "loss": 0.0595, "step": 4886 }, { "epoch": 27.456338028169014, "grad_norm": 0.5526118874549866, "learning_rate": 1.4652e-05, "loss": 0.0416, "step": 4887 }, { "epoch": 27.461971830985917, "grad_norm": 0.6113744378089905, "learning_rate": 1.4655e-05, "loss": 0.0171, "step": 4888 }, { "epoch": 27.467605633802815, "grad_norm": 0.5926219820976257, "learning_rate": 1.4658e-05, "loss": 0.0399, "step": 4889 }, { "epoch": 27.473239436619718, "grad_norm": 0.5102714896202087, "learning_rate": 1.4661e-05, "loss": 0.0694, "step": 4890 }, { "epoch": 27.47887323943662, "grad_norm": 0.5079906582832336, "learning_rate": 1.4664e-05, "loss": 0.0309, "step": 4891 }, { "epoch": 27.484507042253522, "grad_norm": 0.7279917001724243, "learning_rate": 1.4667e-05, "loss": 0.0882, "step": 4892 }, { "epoch": 27.49014084507042, "grad_norm": 0.38320687413215637, "learning_rate": 1.467e-05, "loss": 0.0156, "step": 4893 }, { "epoch": 27.495774647887323, "grad_norm": 0.49306759238243103, "learning_rate": 1.4673e-05, "loss": 0.0256, "step": 4894 }, { "epoch": 27.501408450704226, "grad_norm": 0.8105533719062805, "learning_rate": 1.4676000000000001e-05, "loss": 0.1869, "step": 4895 }, { "epoch": 27.507042253521128, "grad_norm": 0.7150955200195312, "learning_rate": 1.4679000000000001e-05, "loss": 0.1234, "step": 4896 }, { "epoch": 27.512676056338027, "grad_norm": 0.511487603187561, "learning_rate": 1.4682000000000001e-05, "loss": 0.1241, "step": 4897 }, { "epoch": 27.51830985915493, "grad_norm": 0.5989032983779907, "learning_rate": 1.4685000000000001e-05, "loss": 0.1174, "step": 4898 
}, { "epoch": 27.52394366197183, "grad_norm": 0.6633748412132263, "learning_rate": 1.4687999999999999e-05, "loss": 0.1586, "step": 4899 }, { "epoch": 27.529577464788733, "grad_norm": 0.6565715074539185, "learning_rate": 1.4691e-05, "loss": 0.1424, "step": 4900 }, { "epoch": 27.535211267605632, "grad_norm": 0.5008484125137329, "learning_rate": 1.4694e-05, "loss": 0.0822, "step": 4901 }, { "epoch": 27.540845070422534, "grad_norm": 0.4523524343967438, "learning_rate": 1.4697e-05, "loss": 0.0823, "step": 4902 }, { "epoch": 27.546478873239437, "grad_norm": 0.8573121428489685, "learning_rate": 1.47e-05, "loss": 0.1039, "step": 4903 }, { "epoch": 27.55211267605634, "grad_norm": 0.511228621006012, "learning_rate": 1.4703e-05, "loss": 0.0819, "step": 4904 }, { "epoch": 27.557746478873238, "grad_norm": 0.5802244544029236, "learning_rate": 1.4706000000000001e-05, "loss": 0.1119, "step": 4905 }, { "epoch": 27.56338028169014, "grad_norm": 0.6239862442016602, "learning_rate": 1.4709000000000001e-05, "loss": 0.0715, "step": 4906 }, { "epoch": 27.569014084507042, "grad_norm": 0.5307694673538208, "learning_rate": 1.4712e-05, "loss": 0.0784, "step": 4907 }, { "epoch": 27.574647887323945, "grad_norm": 0.7003507018089294, "learning_rate": 1.4715e-05, "loss": 0.0868, "step": 4908 }, { "epoch": 27.580281690140843, "grad_norm": 0.5749926567077637, "learning_rate": 1.4718e-05, "loss": 0.0904, "step": 4909 }, { "epoch": 27.585915492957746, "grad_norm": 0.5870910882949829, "learning_rate": 1.4721000000000002e-05, "loss": 0.0945, "step": 4910 }, { "epoch": 27.591549295774648, "grad_norm": 1.098414421081543, "learning_rate": 1.4724e-05, "loss": 0.0541, "step": 4911 }, { "epoch": 27.59718309859155, "grad_norm": 0.7292921543121338, "learning_rate": 1.4727e-05, "loss": 0.0827, "step": 4912 }, { "epoch": 27.60281690140845, "grad_norm": 0.6000948548316956, "learning_rate": 1.473e-05, "loss": 0.0505, "step": 4913 }, { "epoch": 27.60845070422535, "grad_norm": 0.46747836470603943, "learning_rate": 
1.4733e-05, "loss": 0.0591, "step": 4914 }, { "epoch": 27.614084507042254, "grad_norm": 0.5252728462219238, "learning_rate": 1.4736000000000001e-05, "loss": 0.065, "step": 4915 }, { "epoch": 27.619718309859156, "grad_norm": 0.7016692757606506, "learning_rate": 1.4739e-05, "loss": 0.0684, "step": 4916 }, { "epoch": 27.625352112676055, "grad_norm": 0.5762319564819336, "learning_rate": 1.4742e-05, "loss": 0.05, "step": 4917 }, { "epoch": 27.630985915492957, "grad_norm": 0.7141552567481995, "learning_rate": 1.4745e-05, "loss": 0.0547, "step": 4918 }, { "epoch": 27.63661971830986, "grad_norm": 0.5354654788970947, "learning_rate": 1.4748e-05, "loss": 0.0418, "step": 4919 }, { "epoch": 27.64225352112676, "grad_norm": 0.6054145693778992, "learning_rate": 1.4751000000000002e-05, "loss": 0.0481, "step": 4920 }, { "epoch": 27.647887323943664, "grad_norm": 0.7276031970977783, "learning_rate": 1.4754000000000001e-05, "loss": 0.0347, "step": 4921 }, { "epoch": 27.653521126760563, "grad_norm": 0.7706169486045837, "learning_rate": 1.4757000000000001e-05, "loss": 0.0449, "step": 4922 }, { "epoch": 27.659154929577465, "grad_norm": 0.4175261855125427, "learning_rate": 1.4760000000000001e-05, "loss": 0.0377, "step": 4923 }, { "epoch": 27.664788732394367, "grad_norm": 0.6430063247680664, "learning_rate": 1.4762999999999999e-05, "loss": 0.0556, "step": 4924 }, { "epoch": 27.670422535211266, "grad_norm": 0.499721884727478, "learning_rate": 1.4766e-05, "loss": 0.0341, "step": 4925 }, { "epoch": 27.676056338028168, "grad_norm": 0.6450969576835632, "learning_rate": 1.4769e-05, "loss": 0.0509, "step": 4926 }, { "epoch": 27.68169014084507, "grad_norm": 0.5008199214935303, "learning_rate": 1.4772e-05, "loss": 0.0705, "step": 4927 }, { "epoch": 27.687323943661973, "grad_norm": 0.5646867752075195, "learning_rate": 1.4775e-05, "loss": 0.0282, "step": 4928 }, { "epoch": 27.692957746478875, "grad_norm": 0.4513305723667145, "learning_rate": 1.4778e-05, "loss": 0.0404, "step": 4929 }, { "epoch": 
27.698591549295774, "grad_norm": 0.6260466575622559, "learning_rate": 1.4781000000000001e-05, "loss": 0.0404, "step": 4930 }, { "epoch": 27.704225352112676, "grad_norm": 0.4054688811302185, "learning_rate": 1.4784000000000001e-05, "loss": 0.0139, "step": 4931 }, { "epoch": 27.70985915492958, "grad_norm": 0.7496935725212097, "learning_rate": 1.4787000000000001e-05, "loss": 0.0347, "step": 4932 }, { "epoch": 27.71549295774648, "grad_norm": 1.3134689331054688, "learning_rate": 1.479e-05, "loss": 0.0355, "step": 4933 }, { "epoch": 27.72112676056338, "grad_norm": 0.6447964310646057, "learning_rate": 1.4793e-05, "loss": 0.0234, "step": 4934 }, { "epoch": 27.72676056338028, "grad_norm": 0.4937184751033783, "learning_rate": 1.4796000000000002e-05, "loss": 0.0294, "step": 4935 }, { "epoch": 27.732394366197184, "grad_norm": 0.43360471725463867, "learning_rate": 1.4799e-05, "loss": 0.0163, "step": 4936 }, { "epoch": 27.738028169014086, "grad_norm": 0.5425284504890442, "learning_rate": 1.4802e-05, "loss": 0.0552, "step": 4937 }, { "epoch": 27.743661971830985, "grad_norm": 0.67104172706604, "learning_rate": 1.4805e-05, "loss": 0.0263, "step": 4938 }, { "epoch": 27.749295774647887, "grad_norm": 1.2760601043701172, "learning_rate": 1.4808e-05, "loss": 0.2768, "step": 4939 }, { "epoch": 27.75492957746479, "grad_norm": 0.629448652267456, "learning_rate": 1.4811000000000001e-05, "loss": 0.121, "step": 4940 }, { "epoch": 27.760563380281692, "grad_norm": 0.7460485100746155, "learning_rate": 1.4814e-05, "loss": 0.1311, "step": 4941 }, { "epoch": 27.76619718309859, "grad_norm": 0.5845306515693665, "learning_rate": 1.4817e-05, "loss": 0.1018, "step": 4942 }, { "epoch": 27.771830985915493, "grad_norm": 0.5577617883682251, "learning_rate": 1.482e-05, "loss": 0.1294, "step": 4943 }, { "epoch": 27.777464788732395, "grad_norm": 0.5754668712615967, "learning_rate": 1.4823e-05, "loss": 0.0918, "step": 4944 }, { "epoch": 27.783098591549297, "grad_norm": 0.6356242299079895, "learning_rate": 
1.4826e-05, "loss": 0.1231, "step": 4945 }, { "epoch": 27.788732394366196, "grad_norm": 1.0962554216384888, "learning_rate": 1.4829000000000002e-05, "loss": 0.1704, "step": 4946 }, { "epoch": 27.7943661971831, "grad_norm": 0.5350479483604431, "learning_rate": 1.4832000000000001e-05, "loss": 0.0902, "step": 4947 }, { "epoch": 27.8, "grad_norm": 0.5375921130180359, "learning_rate": 1.4835e-05, "loss": 0.0778, "step": 4948 }, { "epoch": 27.805633802816903, "grad_norm": 0.688414454460144, "learning_rate": 1.4838e-05, "loss": 0.0839, "step": 4949 }, { "epoch": 27.8112676056338, "grad_norm": 0.7240037322044373, "learning_rate": 1.4840999999999999e-05, "loss": 0.1103, "step": 4950 }, { "epoch": 27.816901408450704, "grad_norm": 0.5880854725837708, "learning_rate": 1.4844e-05, "loss": 0.0954, "step": 4951 }, { "epoch": 27.822535211267606, "grad_norm": 0.578026294708252, "learning_rate": 1.4847e-05, "loss": 0.1561, "step": 4952 }, { "epoch": 27.82816901408451, "grad_norm": 0.5621540546417236, "learning_rate": 1.485e-05, "loss": 0.1204, "step": 4953 }, { "epoch": 27.833802816901407, "grad_norm": 0.6167405843734741, "learning_rate": 1.4853e-05, "loss": 0.0722, "step": 4954 }, { "epoch": 27.83943661971831, "grad_norm": 0.6039847135543823, "learning_rate": 1.4856e-05, "loss": 0.0818, "step": 4955 }, { "epoch": 27.845070422535212, "grad_norm": 0.6319277286529541, "learning_rate": 1.4859000000000001e-05, "loss": 0.0571, "step": 4956 }, { "epoch": 27.850704225352114, "grad_norm": 0.5956560969352722, "learning_rate": 1.4862000000000001e-05, "loss": 0.086, "step": 4957 }, { "epoch": 27.856338028169013, "grad_norm": 0.48726123571395874, "learning_rate": 1.4865e-05, "loss": 0.049, "step": 4958 }, { "epoch": 27.861971830985915, "grad_norm": 0.6268653869628906, "learning_rate": 1.4868e-05, "loss": 0.0522, "step": 4959 }, { "epoch": 27.867605633802818, "grad_norm": 0.5409794449806213, "learning_rate": 1.4871e-05, "loss": 0.0636, "step": 4960 }, { "epoch": 27.87323943661972, "grad_norm": 
0.5530878305435181, "learning_rate": 1.4874e-05, "loss": 0.0575, "step": 4961 }, { "epoch": 27.87887323943662, "grad_norm": 0.63461834192276, "learning_rate": 1.4877e-05, "loss": 0.0717, "step": 4962 }, { "epoch": 27.88450704225352, "grad_norm": 0.5703501105308533, "learning_rate": 1.488e-05, "loss": 0.032, "step": 4963 }, { "epoch": 27.890140845070423, "grad_norm": 0.44545480608940125, "learning_rate": 1.4883e-05, "loss": 0.0474, "step": 4964 }, { "epoch": 27.895774647887325, "grad_norm": 2.484574317932129, "learning_rate": 1.4886e-05, "loss": 0.1042, "step": 4965 }, { "epoch": 27.901408450704224, "grad_norm": 0.40536513924598694, "learning_rate": 1.4889000000000001e-05, "loss": 0.0451, "step": 4966 }, { "epoch": 27.907042253521126, "grad_norm": 0.6032202243804932, "learning_rate": 1.4892e-05, "loss": 0.0376, "step": 4967 }, { "epoch": 27.91267605633803, "grad_norm": 1.3796416521072388, "learning_rate": 1.4895e-05, "loss": 0.0884, "step": 4968 }, { "epoch": 27.91830985915493, "grad_norm": 0.4352993071079254, "learning_rate": 1.4898e-05, "loss": 0.0316, "step": 4969 }, { "epoch": 27.92394366197183, "grad_norm": 0.42185598611831665, "learning_rate": 1.4901e-05, "loss": 0.0377, "step": 4970 }, { "epoch": 27.929577464788732, "grad_norm": 0.6166189908981323, "learning_rate": 1.4904000000000002e-05, "loss": 0.0674, "step": 4971 }, { "epoch": 27.935211267605634, "grad_norm": 0.5457927584648132, "learning_rate": 1.4907000000000001e-05, "loss": 0.0858, "step": 4972 }, { "epoch": 27.940845070422537, "grad_norm": 0.3100431561470032, "learning_rate": 1.491e-05, "loss": 0.021, "step": 4973 }, { "epoch": 27.946478873239435, "grad_norm": 0.49587565660476685, "learning_rate": 1.4913e-05, "loss": 0.0329, "step": 4974 }, { "epoch": 27.952112676056338, "grad_norm": 0.6730433702468872, "learning_rate": 1.4915999999999999e-05, "loss": 0.0745, "step": 4975 }, { "epoch": 27.95774647887324, "grad_norm": 0.7008295655250549, "learning_rate": 1.4919e-05, "loss": 0.0629, "step": 4976 }, { 
"epoch": 27.963380281690142, "grad_norm": 1.163342833518982, "learning_rate": 1.4922e-05, "loss": 0.0973, "step": 4977 }, { "epoch": 27.96901408450704, "grad_norm": 0.523129403591156, "learning_rate": 1.4925e-05, "loss": 0.0287, "step": 4978 }, { "epoch": 27.974647887323943, "grad_norm": 0.4284622371196747, "learning_rate": 1.4928e-05, "loss": 0.0217, "step": 4979 }, { "epoch": 27.980281690140846, "grad_norm": 0.44465354084968567, "learning_rate": 1.4931e-05, "loss": 0.0148, "step": 4980 }, { "epoch": 27.985915492957748, "grad_norm": 0.4868376553058624, "learning_rate": 1.4934000000000001e-05, "loss": 0.0481, "step": 4981 }, { "epoch": 27.991549295774647, "grad_norm": 0.6332249045372009, "learning_rate": 1.4937000000000001e-05, "loss": 0.0309, "step": 4982 }, { "epoch": 27.99718309859155, "grad_norm": 1.0031765699386597, "learning_rate": 1.4940000000000001e-05, "loss": 0.0851, "step": 4983 }, { "epoch": 28.0, "grad_norm": 0.1905517429113388, "learning_rate": 1.4943e-05, "loss": 0.0062, "step": 4984 }, { "epoch": 28.005633802816902, "grad_norm": 0.6297371983528137, "learning_rate": 1.4945999999999999e-05, "loss": 0.1299, "step": 4985 }, { "epoch": 28.011267605633805, "grad_norm": 0.648108184337616, "learning_rate": 1.4949e-05, "loss": 0.1598, "step": 4986 }, { "epoch": 28.016901408450703, "grad_norm": 0.8379295468330383, "learning_rate": 1.4952e-05, "loss": 0.1302, "step": 4987 }, { "epoch": 28.022535211267606, "grad_norm": 0.6076474189758301, "learning_rate": 1.4955e-05, "loss": 0.1052, "step": 4988 }, { "epoch": 28.028169014084508, "grad_norm": 0.7870191335678101, "learning_rate": 1.4958e-05, "loss": 0.1085, "step": 4989 }, { "epoch": 28.03380281690141, "grad_norm": 0.5815459489822388, "learning_rate": 1.4961e-05, "loss": 0.1018, "step": 4990 }, { "epoch": 28.03943661971831, "grad_norm": 0.6764193773269653, "learning_rate": 1.4964000000000001e-05, "loss": 0.1166, "step": 4991 }, { "epoch": 28.04507042253521, "grad_norm": 0.985612154006958, "learning_rate": 
1.4967000000000001e-05, "loss": 0.1714, "step": 4992 }, { "epoch": 28.050704225352113, "grad_norm": 0.6194036602973938, "learning_rate": 1.497e-05, "loss": 0.0825, "step": 4993 }, { "epoch": 28.056338028169016, "grad_norm": 0.5836390852928162, "learning_rate": 1.4973e-05, "loss": 0.0866, "step": 4994 }, { "epoch": 28.061971830985915, "grad_norm": 0.541460394859314, "learning_rate": 1.4976e-05, "loss": 0.0948, "step": 4995 }, { "epoch": 28.067605633802817, "grad_norm": 0.49652934074401855, "learning_rate": 1.4979000000000002e-05, "loss": 0.0688, "step": 4996 }, { "epoch": 28.07323943661972, "grad_norm": 0.6126191020011902, "learning_rate": 1.4982e-05, "loss": 0.0863, "step": 4997 }, { "epoch": 28.07887323943662, "grad_norm": 0.9024507999420166, "learning_rate": 1.4985e-05, "loss": 0.1361, "step": 4998 }, { "epoch": 28.08450704225352, "grad_norm": 0.5036882162094116, "learning_rate": 1.4988e-05, "loss": 0.0578, "step": 4999 }, { "epoch": 28.090140845070422, "grad_norm": 0.6240023970603943, "learning_rate": 1.4991e-05, "loss": 0.0849, "step": 5000 }, { "epoch": 28.090140845070422, "eval_cer": 0.11097285947585349, "eval_loss": 0.35934266448020935, "eval_runtime": 16.0797, "eval_samples_per_second": 18.906, "eval_steps_per_second": 0.622, "eval_wer": 0.3860322333077513, "step": 5000 }, { "epoch": 28.095774647887325, "grad_norm": 0.5462090969085693, "learning_rate": 1.4994e-05, "loss": 0.0566, "step": 5001 }, { "epoch": 28.101408450704227, "grad_norm": 0.4824041426181793, "learning_rate": 1.4997e-05, "loss": 0.0491, "step": 5002 }, { "epoch": 28.107042253521126, "grad_norm": 0.6504799723625183, "learning_rate": 1.5e-05, "loss": 0.0643, "step": 5003 }, { "epoch": 28.112676056338028, "grad_norm": 0.5091484189033508, "learning_rate": 1.5003e-05, "loss": 0.044, "step": 5004 }, { "epoch": 28.11830985915493, "grad_norm": 0.5613542795181274, "learning_rate": 1.5006e-05, "loss": 0.0477, "step": 5005 }, { "epoch": 28.123943661971833, "grad_norm": 0.5610839128494263, 
"learning_rate": 1.5009e-05, "loss": 0.0782, "step": 5006 }, { "epoch": 28.12957746478873, "grad_norm": 0.6948246359825134, "learning_rate": 1.5012e-05, "loss": 0.0522, "step": 5007 }, { "epoch": 28.135211267605634, "grad_norm": 0.45367205142974854, "learning_rate": 1.5015e-05, "loss": 0.0558, "step": 5008 }, { "epoch": 28.140845070422536, "grad_norm": 0.5706816911697388, "learning_rate": 1.5018000000000001e-05, "loss": 0.0328, "step": 5009 }, { "epoch": 28.146478873239438, "grad_norm": 0.3302563726902008, "learning_rate": 1.5021e-05, "loss": 0.0223, "step": 5010 }, { "epoch": 28.152112676056337, "grad_norm": 0.48660680651664734, "learning_rate": 1.5024e-05, "loss": 0.042, "step": 5011 }, { "epoch": 28.15774647887324, "grad_norm": 0.6003103256225586, "learning_rate": 1.5027e-05, "loss": 0.0946, "step": 5012 }, { "epoch": 28.16338028169014, "grad_norm": 0.5214948654174805, "learning_rate": 1.503e-05, "loss": 0.038, "step": 5013 }, { "epoch": 28.169014084507044, "grad_norm": 0.5867151021957397, "learning_rate": 1.5033e-05, "loss": 0.0361, "step": 5014 }, { "epoch": 28.174647887323943, "grad_norm": 0.8113002181053162, "learning_rate": 1.5036e-05, "loss": 0.0421, "step": 5015 }, { "epoch": 28.180281690140845, "grad_norm": 0.3862939178943634, "learning_rate": 1.5039e-05, "loss": 0.0242, "step": 5016 }, { "epoch": 28.185915492957747, "grad_norm": 0.4959786832332611, "learning_rate": 1.5042e-05, "loss": 0.0412, "step": 5017 }, { "epoch": 28.19154929577465, "grad_norm": 0.5077106952667236, "learning_rate": 1.5044999999999999e-05, "loss": 0.0261, "step": 5018 }, { "epoch": 28.197183098591548, "grad_norm": 0.40455931425094604, "learning_rate": 1.5048000000000002e-05, "loss": 0.0215, "step": 5019 }, { "epoch": 28.20281690140845, "grad_norm": 0.5101828575134277, "learning_rate": 1.5051000000000002e-05, "loss": 0.0341, "step": 5020 }, { "epoch": 28.208450704225353, "grad_norm": 0.42079421877861023, "learning_rate": 1.5054000000000002e-05, "loss": 0.0335, "step": 5021 }, { 
"epoch": 28.214084507042255, "grad_norm": 0.5789754390716553, "learning_rate": 1.5057e-05, "loss": 0.0577, "step": 5022 }, { "epoch": 28.219718309859154, "grad_norm": 0.7131274342536926, "learning_rate": 1.506e-05, "loss": 0.068, "step": 5023 }, { "epoch": 28.225352112676056, "grad_norm": 0.4134787917137146, "learning_rate": 1.5063e-05, "loss": 0.0367, "step": 5024 }, { "epoch": 28.23098591549296, "grad_norm": 0.4394420385360718, "learning_rate": 1.5066e-05, "loss": 0.0228, "step": 5025 }, { "epoch": 28.23661971830986, "grad_norm": 0.4282046854496002, "learning_rate": 1.5069e-05, "loss": 0.0189, "step": 5026 }, { "epoch": 28.24225352112676, "grad_norm": 0.5622921586036682, "learning_rate": 1.5071999999999999e-05, "loss": 0.0435, "step": 5027 }, { "epoch": 28.24788732394366, "grad_norm": 0.8353300094604492, "learning_rate": 1.5074999999999999e-05, "loss": 0.0316, "step": 5028 }, { "epoch": 28.253521126760564, "grad_norm": 0.8843896389007568, "learning_rate": 1.5078000000000002e-05, "loss": 0.1845, "step": 5029 }, { "epoch": 28.259154929577466, "grad_norm": 0.611383855342865, "learning_rate": 1.5081000000000002e-05, "loss": 0.0946, "step": 5030 }, { "epoch": 28.264788732394365, "grad_norm": 0.612872838973999, "learning_rate": 1.5084000000000002e-05, "loss": 0.1226, "step": 5031 }, { "epoch": 28.270422535211267, "grad_norm": 0.7628498077392578, "learning_rate": 1.5087000000000001e-05, "loss": 0.1243, "step": 5032 }, { "epoch": 28.27605633802817, "grad_norm": 0.5856537818908691, "learning_rate": 1.5090000000000001e-05, "loss": 0.1233, "step": 5033 }, { "epoch": 28.281690140845072, "grad_norm": 0.7676257491111755, "learning_rate": 1.5093e-05, "loss": 0.1632, "step": 5034 }, { "epoch": 28.28732394366197, "grad_norm": 0.6813649535179138, "learning_rate": 1.5095999999999999e-05, "loss": 0.1044, "step": 5035 }, { "epoch": 28.292957746478873, "grad_norm": 0.5896927714347839, "learning_rate": 1.5098999999999999e-05, "loss": 0.0829, "step": 5036 }, { "epoch": 
28.298591549295775, "grad_norm": 0.6047108769416809, "learning_rate": 1.5101999999999999e-05, "loss": 0.0839, "step": 5037 }, { "epoch": 28.304225352112677, "grad_norm": 0.48104214668273926, "learning_rate": 1.5104999999999999e-05, "loss": 0.0658, "step": 5038 }, { "epoch": 28.309859154929576, "grad_norm": 0.7545119524002075, "learning_rate": 1.5108000000000002e-05, "loss": 0.1219, "step": 5039 }, { "epoch": 28.31549295774648, "grad_norm": 0.7601920962333679, "learning_rate": 1.5111000000000002e-05, "loss": 0.0715, "step": 5040 }, { "epoch": 28.32112676056338, "grad_norm": 0.44255393743515015, "learning_rate": 1.5114000000000001e-05, "loss": 0.0873, "step": 5041 }, { "epoch": 28.326760563380283, "grad_norm": 0.7731963396072388, "learning_rate": 1.5117000000000001e-05, "loss": 0.0759, "step": 5042 }, { "epoch": 28.33239436619718, "grad_norm": 0.5665032267570496, "learning_rate": 1.5120000000000001e-05, "loss": 0.133, "step": 5043 }, { "epoch": 28.338028169014084, "grad_norm": 0.5269644260406494, "learning_rate": 1.5123e-05, "loss": 0.0444, "step": 5044 }, { "epoch": 28.343661971830986, "grad_norm": 0.706869900226593, "learning_rate": 1.5126e-05, "loss": 0.0999, "step": 5045 }, { "epoch": 28.34929577464789, "grad_norm": 0.42561665177345276, "learning_rate": 1.5129e-05, "loss": 0.0421, "step": 5046 }, { "epoch": 28.354929577464787, "grad_norm": 0.46806076169013977, "learning_rate": 1.5131999999999998e-05, "loss": 0.0574, "step": 5047 }, { "epoch": 28.36056338028169, "grad_norm": 0.6040133833885193, "learning_rate": 1.5134999999999998e-05, "loss": 0.0632, "step": 5048 }, { "epoch": 28.366197183098592, "grad_norm": 0.5391446352005005, "learning_rate": 1.5138000000000001e-05, "loss": 0.0611, "step": 5049 }, { "epoch": 28.371830985915494, "grad_norm": 0.8513173460960388, "learning_rate": 1.5141000000000001e-05, "loss": 0.0661, "step": 5050 }, { "epoch": 28.377464788732393, "grad_norm": 0.41929569840431213, "learning_rate": 1.5144000000000001e-05, "loss": 0.0402, "step": 
5051 }, { "epoch": 28.383098591549295, "grad_norm": 0.5351955890655518, "learning_rate": 1.5147e-05, "loss": 0.0536, "step": 5052 }, { "epoch": 28.388732394366198, "grad_norm": 0.7283332347869873, "learning_rate": 1.515e-05, "loss": 0.0674, "step": 5053 }, { "epoch": 28.3943661971831, "grad_norm": 0.8806993365287781, "learning_rate": 1.5153e-05, "loss": 0.0605, "step": 5054 }, { "epoch": 28.4, "grad_norm": 0.5898083448410034, "learning_rate": 1.5156e-05, "loss": 0.0447, "step": 5055 }, { "epoch": 28.4056338028169, "grad_norm": 0.5553488731384277, "learning_rate": 1.5159e-05, "loss": 0.0419, "step": 5056 }, { "epoch": 28.411267605633803, "grad_norm": 0.7786492109298706, "learning_rate": 1.5162e-05, "loss": 0.0342, "step": 5057 }, { "epoch": 28.416901408450705, "grad_norm": 0.6142464876174927, "learning_rate": 1.5165e-05, "loss": 0.0941, "step": 5058 }, { "epoch": 28.422535211267604, "grad_norm": 0.4247497618198395, "learning_rate": 1.5168000000000001e-05, "loss": 0.0534, "step": 5059 }, { "epoch": 28.428169014084506, "grad_norm": 0.4785853624343872, "learning_rate": 1.5171000000000001e-05, "loss": 0.0278, "step": 5060 }, { "epoch": 28.43380281690141, "grad_norm": 0.5297704935073853, "learning_rate": 1.5174e-05, "loss": 0.0394, "step": 5061 }, { "epoch": 28.43943661971831, "grad_norm": 0.4901992380619049, "learning_rate": 1.5177e-05, "loss": 0.0799, "step": 5062 }, { "epoch": 28.44507042253521, "grad_norm": 0.6108081340789795, "learning_rate": 1.518e-05, "loss": 0.0674, "step": 5063 }, { "epoch": 28.450704225352112, "grad_norm": 0.6629579663276672, "learning_rate": 1.5183e-05, "loss": 0.026, "step": 5064 }, { "epoch": 28.456338028169014, "grad_norm": 0.6000758409500122, "learning_rate": 1.5186e-05, "loss": 0.0351, "step": 5065 }, { "epoch": 28.461971830985917, "grad_norm": 2.086622953414917, "learning_rate": 1.5189e-05, "loss": 0.0337, "step": 5066 }, { "epoch": 28.467605633802815, "grad_norm": 0.4192711412906647, "learning_rate": 1.5192e-05, "loss": 0.0274, "step": 
5067 }, { "epoch": 28.473239436619718, "grad_norm": 0.7077690958976746, "learning_rate": 1.5195e-05, "loss": 0.0524, "step": 5068 }, { "epoch": 28.47887323943662, "grad_norm": 0.6319155693054199, "learning_rate": 1.5198000000000003e-05, "loss": 0.0266, "step": 5069 }, { "epoch": 28.484507042253522, "grad_norm": 0.5046270489692688, "learning_rate": 1.5201000000000002e-05, "loss": 0.0829, "step": 5070 }, { "epoch": 28.49014084507042, "grad_norm": 0.5677661299705505, "learning_rate": 1.5204e-05, "loss": 0.0477, "step": 5071 }, { "epoch": 28.495774647887323, "grad_norm": 0.5993872880935669, "learning_rate": 1.5207e-05, "loss": 0.0424, "step": 5072 }, { "epoch": 28.501408450704226, "grad_norm": 1.208512306213379, "learning_rate": 1.521e-05, "loss": 0.2478, "step": 5073 }, { "epoch": 28.507042253521128, "grad_norm": 0.7113643884658813, "learning_rate": 1.5213e-05, "loss": 0.1171, "step": 5074 }, { "epoch": 28.512676056338027, "grad_norm": 1.5102542638778687, "learning_rate": 1.5216e-05, "loss": 0.1454, "step": 5075 }, { "epoch": 28.51830985915493, "grad_norm": 0.7796016335487366, "learning_rate": 1.5219e-05, "loss": 0.1102, "step": 5076 }, { "epoch": 28.52394366197183, "grad_norm": 0.8712618947029114, "learning_rate": 1.5222e-05, "loss": 0.1011, "step": 5077 }, { "epoch": 28.529577464788733, "grad_norm": 0.6349594593048096, "learning_rate": 1.5224999999999999e-05, "loss": 0.096, "step": 5078 }, { "epoch": 28.535211267605632, "grad_norm": 0.52194744348526, "learning_rate": 1.5228000000000002e-05, "loss": 0.0798, "step": 5079 }, { "epoch": 28.540845070422534, "grad_norm": 0.8381851315498352, "learning_rate": 1.5231000000000002e-05, "loss": 0.1404, "step": 5080 }, { "epoch": 28.546478873239437, "grad_norm": 0.7942418456077576, "learning_rate": 1.5234000000000002e-05, "loss": 0.0962, "step": 5081 }, { "epoch": 28.55211267605634, "grad_norm": 0.8814622163772583, "learning_rate": 1.5237000000000002e-05, "loss": 0.0816, "step": 5082 }, { "epoch": 28.557746478873238, 
"grad_norm": 0.8171583414077759, "learning_rate": 1.524e-05, "loss": 0.1026, "step": 5083 }, { "epoch": 28.56338028169014, "grad_norm": 0.6452773213386536, "learning_rate": 1.5243e-05, "loss": 0.0702, "step": 5084 }, { "epoch": 28.569014084507042, "grad_norm": 0.5273996591567993, "learning_rate": 1.5246e-05, "loss": 0.0843, "step": 5085 }, { "epoch": 28.574647887323945, "grad_norm": 0.4860202372074127, "learning_rate": 1.5249e-05, "loss": 0.0567, "step": 5086 }, { "epoch": 28.580281690140843, "grad_norm": 0.7411065697669983, "learning_rate": 1.5251999999999999e-05, "loss": 0.0903, "step": 5087 }, { "epoch": 28.585915492957746, "grad_norm": 0.5044342279434204, "learning_rate": 1.5254999999999999e-05, "loss": 0.0723, "step": 5088 }, { "epoch": 28.591549295774648, "grad_norm": 0.6983991265296936, "learning_rate": 1.5258000000000002e-05, "loss": 0.0803, "step": 5089 }, { "epoch": 28.59718309859155, "grad_norm": 0.819882869720459, "learning_rate": 1.5261000000000002e-05, "loss": 0.0806, "step": 5090 }, { "epoch": 28.60281690140845, "grad_norm": 0.8138038516044617, "learning_rate": 1.5264e-05, "loss": 0.0742, "step": 5091 }, { "epoch": 28.60845070422535, "grad_norm": 0.6654368042945862, "learning_rate": 1.5267e-05, "loss": 0.0373, "step": 5092 }, { "epoch": 28.614084507042254, "grad_norm": 0.4957199692726135, "learning_rate": 1.527e-05, "loss": 0.0899, "step": 5093 }, { "epoch": 28.619718309859156, "grad_norm": 0.6170735955238342, "learning_rate": 1.5273e-05, "loss": 0.0415, "step": 5094 }, { "epoch": 28.625352112676055, "grad_norm": 0.630759596824646, "learning_rate": 1.5276e-05, "loss": 0.0966, "step": 5095 }, { "epoch": 28.630985915492957, "grad_norm": 0.5733283758163452, "learning_rate": 1.5279e-05, "loss": 0.0395, "step": 5096 }, { "epoch": 28.63661971830986, "grad_norm": 0.607933521270752, "learning_rate": 1.5282e-05, "loss": 0.0415, "step": 5097 }, { "epoch": 28.64225352112676, "grad_norm": 1.8056646585464478, "learning_rate": 1.5285e-05, "loss": 0.0703, "step": 
5098 }, { "epoch": 28.647887323943664, "grad_norm": 0.6321905851364136, "learning_rate": 1.5288000000000003e-05, "loss": 0.0455, "step": 5099 }, { "epoch": 28.653521126760563, "grad_norm": 0.610551118850708, "learning_rate": 1.5291000000000003e-05, "loss": 0.0315, "step": 5100 }, { "epoch": 28.659154929577465, "grad_norm": 0.5070680379867554, "learning_rate": 1.5294000000000003e-05, "loss": 0.0532, "step": 5101 }, { "epoch": 28.664788732394367, "grad_norm": 0.435300350189209, "learning_rate": 1.5297e-05, "loss": 0.0371, "step": 5102 }, { "epoch": 28.670422535211266, "grad_norm": 0.591464638710022, "learning_rate": 1.53e-05, "loss": 0.0352, "step": 5103 }, { "epoch": 28.676056338028168, "grad_norm": 0.5689784288406372, "learning_rate": 1.5303e-05, "loss": 0.0728, "step": 5104 }, { "epoch": 28.68169014084507, "grad_norm": 0.8144098520278931, "learning_rate": 1.5306e-05, "loss": 0.0616, "step": 5105 }, { "epoch": 28.687323943661973, "grad_norm": 0.8304027915000916, "learning_rate": 1.5309e-05, "loss": 0.0352, "step": 5106 }, { "epoch": 28.692957746478875, "grad_norm": 0.48128542304039, "learning_rate": 1.5312e-05, "loss": 0.025, "step": 5107 }, { "epoch": 28.698591549295774, "grad_norm": 0.4668400287628174, "learning_rate": 1.5314999999999998e-05, "loss": 0.0327, "step": 5108 }, { "epoch": 28.704225352112676, "grad_norm": 0.5240230560302734, "learning_rate": 1.5318e-05, "loss": 0.0418, "step": 5109 }, { "epoch": 28.70985915492958, "grad_norm": 0.7148030996322632, "learning_rate": 1.5321e-05, "loss": 0.0468, "step": 5110 }, { "epoch": 28.71549295774648, "grad_norm": 0.661095917224884, "learning_rate": 1.5324e-05, "loss": 0.0291, "step": 5111 }, { "epoch": 28.72112676056338, "grad_norm": 0.3988981246948242, "learning_rate": 1.5327e-05, "loss": 0.0775, "step": 5112 }, { "epoch": 28.72676056338028, "grad_norm": 0.3505437672138214, "learning_rate": 1.533e-05, "loss": 0.0148, "step": 5113 }, { "epoch": 28.732394366197184, "grad_norm": 0.6742246747016907, "learning_rate": 
1.5333e-05, "loss": 0.0204, "step": 5114 }, { "epoch": 28.738028169014086, "grad_norm": 0.517822265625, "learning_rate": 1.5336e-05, "loss": 0.0732, "step": 5115 }, { "epoch": 28.743661971830985, "grad_norm": 0.3959486484527588, "learning_rate": 1.5339e-05, "loss": 0.0261, "step": 5116 }, { "epoch": 28.749295774647887, "grad_norm": 0.9901692271232605, "learning_rate": 1.5342e-05, "loss": 0.1818, "step": 5117 }, { "epoch": 28.75492957746479, "grad_norm": 0.6848888993263245, "learning_rate": 1.5345e-05, "loss": 0.1212, "step": 5118 }, { "epoch": 28.760563380281692, "grad_norm": 0.7092110514640808, "learning_rate": 1.5348000000000003e-05, "loss": 0.1206, "step": 5119 }, { "epoch": 28.76619718309859, "grad_norm": 0.7864041924476624, "learning_rate": 1.5351000000000003e-05, "loss": 0.1516, "step": 5120 }, { "epoch": 28.771830985915493, "grad_norm": 0.9141786694526672, "learning_rate": 1.5354000000000002e-05, "loss": 0.1536, "step": 5121 }, { "epoch": 28.777464788732395, "grad_norm": 0.9448162317276001, "learning_rate": 1.5357000000000002e-05, "loss": 0.1273, "step": 5122 }, { "epoch": 28.783098591549297, "grad_norm": 0.7076917886734009, "learning_rate": 1.5360000000000002e-05, "loss": 0.129, "step": 5123 }, { "epoch": 28.788732394366196, "grad_norm": 0.6777166128158569, "learning_rate": 1.5363000000000002e-05, "loss": 0.1404, "step": 5124 }, { "epoch": 28.7943661971831, "grad_norm": 0.6132952570915222, "learning_rate": 1.5366e-05, "loss": 0.08, "step": 5125 }, { "epoch": 28.8, "grad_norm": 0.6862670183181763, "learning_rate": 1.5368999999999998e-05, "loss": 0.0776, "step": 5126 }, { "epoch": 28.805633802816903, "grad_norm": 1.0282344818115234, "learning_rate": 1.5371999999999998e-05, "loss": 0.098, "step": 5127 }, { "epoch": 28.8112676056338, "grad_norm": 0.5991065502166748, "learning_rate": 1.5374999999999998e-05, "loss": 0.0693, "step": 5128 }, { "epoch": 28.816901408450704, "grad_norm": 0.649932861328125, "learning_rate": 1.5377999999999997e-05, "loss": 0.1158, 
"step": 5129 }, { "epoch": 28.822535211267606, "grad_norm": 0.5482590198516846, "learning_rate": 1.5381e-05, "loss": 0.0792, "step": 5130 }, { "epoch": 28.82816901408451, "grad_norm": 0.48336032032966614, "learning_rate": 1.5384e-05, "loss": 0.0513, "step": 5131 }, { "epoch": 28.833802816901407, "grad_norm": 0.5548399686813354, "learning_rate": 1.5387e-05, "loss": 0.063, "step": 5132 }, { "epoch": 28.83943661971831, "grad_norm": 0.5412296056747437, "learning_rate": 1.539e-05, "loss": 0.0552, "step": 5133 }, { "epoch": 28.845070422535212, "grad_norm": 0.8137805461883545, "learning_rate": 1.5393e-05, "loss": 0.0593, "step": 5134 }, { "epoch": 28.850704225352114, "grad_norm": 0.6455215215682983, "learning_rate": 1.5396e-05, "loss": 0.0611, "step": 5135 }, { "epoch": 28.856338028169013, "grad_norm": 0.5334020853042603, "learning_rate": 1.5399e-05, "loss": 0.0496, "step": 5136 }, { "epoch": 28.861971830985915, "grad_norm": 0.559784471988678, "learning_rate": 1.5402e-05, "loss": 0.0388, "step": 5137 }, { "epoch": 28.867605633802818, "grad_norm": 0.7096861600875854, "learning_rate": 1.5405e-05, "loss": 0.0769, "step": 5138 }, { "epoch": 28.87323943661972, "grad_norm": 0.5846599340438843, "learning_rate": 1.5408e-05, "loss": 0.0506, "step": 5139 }, { "epoch": 28.87887323943662, "grad_norm": 0.5305101871490479, "learning_rate": 1.5411000000000002e-05, "loss": 0.0503, "step": 5140 }, { "epoch": 28.88450704225352, "grad_norm": 0.6297574639320374, "learning_rate": 1.5414000000000002e-05, "loss": 0.0582, "step": 5141 }, { "epoch": 28.890140845070423, "grad_norm": 0.7346473336219788, "learning_rate": 1.5417e-05, "loss": 0.0603, "step": 5142 }, { "epoch": 28.895774647887325, "grad_norm": 0.5115442872047424, "learning_rate": 1.542e-05, "loss": 0.0311, "step": 5143 }, { "epoch": 28.901408450704224, "grad_norm": 0.4377557039260864, "learning_rate": 1.5423e-05, "loss": 0.0373, "step": 5144 }, { "epoch": 28.907042253521126, "grad_norm": 1.0442829132080078, "learning_rate": 1.5426e-05, 
"loss": 0.0386, "step": 5145 }, { "epoch": 28.91267605633803, "grad_norm": 0.5890056490898132, "learning_rate": 1.5429e-05, "loss": 0.0531, "step": 5146 }, { "epoch": 28.91830985915493, "grad_norm": 0.6472970843315125, "learning_rate": 1.5432e-05, "loss": 0.0511, "step": 5147 }, { "epoch": 28.92394366197183, "grad_norm": 0.7853546738624573, "learning_rate": 1.5435e-05, "loss": 0.0436, "step": 5148 }, { "epoch": 28.929577464788732, "grad_norm": 0.5301849246025085, "learning_rate": 1.5438e-05, "loss": 0.1052, "step": 5149 }, { "epoch": 28.935211267605634, "grad_norm": 0.9190493822097778, "learning_rate": 1.5441000000000003e-05, "loss": 0.0838, "step": 5150 }, { "epoch": 28.940845070422537, "grad_norm": 0.4123941659927368, "learning_rate": 1.5444e-05, "loss": 0.0529, "step": 5151 }, { "epoch": 28.946478873239435, "grad_norm": 1.5943779945373535, "learning_rate": 1.5447e-05, "loss": 0.0457, "step": 5152 }, { "epoch": 28.952112676056338, "grad_norm": 0.45874521136283875, "learning_rate": 1.545e-05, "loss": 0.0666, "step": 5153 }, { "epoch": 28.95774647887324, "grad_norm": 0.6953856945037842, "learning_rate": 1.5453e-05, "loss": 0.0435, "step": 5154 }, { "epoch": 28.963380281690142, "grad_norm": 0.3773043751716614, "learning_rate": 1.5456e-05, "loss": 0.0171, "step": 5155 }, { "epoch": 28.96901408450704, "grad_norm": 0.411206990480423, "learning_rate": 1.5459e-05, "loss": 0.0329, "step": 5156 }, { "epoch": 28.974647887323943, "grad_norm": 0.6514427065849304, "learning_rate": 1.5462e-05, "loss": 0.0416, "step": 5157 }, { "epoch": 28.980281690140846, "grad_norm": 0.7008411884307861, "learning_rate": 1.5465e-05, "loss": 0.0305, "step": 5158 }, { "epoch": 28.985915492957748, "grad_norm": 0.5711977481842041, "learning_rate": 1.5467999999999998e-05, "loss": 0.0169, "step": 5159 }, { "epoch": 28.991549295774647, "grad_norm": 0.5597488284111023, "learning_rate": 1.5471e-05, "loss": 0.0182, "step": 5160 }, { "epoch": 28.99718309859155, "grad_norm": 0.8346076607704163, 
"learning_rate": 1.5474e-05, "loss": 0.0781, "step": 5161 }, { "epoch": 29.0, "grad_norm": 0.3329528272151947, "learning_rate": 1.5477e-05, "loss": 0.0069, "step": 5162 }, { "epoch": 29.005633802816902, "grad_norm": 0.6511438488960266, "learning_rate": 1.548e-05, "loss": 0.1367, "step": 5163 }, { "epoch": 29.011267605633805, "grad_norm": 0.6223174929618835, "learning_rate": 1.5483e-05, "loss": 0.1237, "step": 5164 }, { "epoch": 29.016901408450703, "grad_norm": 0.7894920706748962, "learning_rate": 1.5486e-05, "loss": 0.1269, "step": 5165 }, { "epoch": 29.022535211267606, "grad_norm": 0.8292738795280457, "learning_rate": 1.5489e-05, "loss": 0.166, "step": 5166 }, { "epoch": 29.028169014084508, "grad_norm": 0.7366437911987305, "learning_rate": 1.5492e-05, "loss": 0.1209, "step": 5167 }, { "epoch": 29.03380281690141, "grad_norm": 0.5469945669174194, "learning_rate": 1.5495e-05, "loss": 0.1041, "step": 5168 }, { "epoch": 29.03943661971831, "grad_norm": 0.5740922689437866, "learning_rate": 1.5498e-05, "loss": 0.1219, "step": 5169 }, { "epoch": 29.04507042253521, "grad_norm": 0.7198240160942078, "learning_rate": 1.5501000000000003e-05, "loss": 0.0807, "step": 5170 }, { "epoch": 29.050704225352113, "grad_norm": 0.6353017091751099, "learning_rate": 1.5504000000000003e-05, "loss": 0.085, "step": 5171 }, { "epoch": 29.056338028169016, "grad_norm": 0.6495283842086792, "learning_rate": 1.5507000000000002e-05, "loss": 0.1216, "step": 5172 }, { "epoch": 29.061971830985915, "grad_norm": 0.5703001618385315, "learning_rate": 1.5510000000000002e-05, "loss": 0.0656, "step": 5173 }, { "epoch": 29.067605633802817, "grad_norm": 0.7305501103401184, "learning_rate": 1.5513000000000002e-05, "loss": 0.059, "step": 5174 }, { "epoch": 29.07323943661972, "grad_norm": 0.4775819778442383, "learning_rate": 1.5516000000000002e-05, "loss": 0.0766, "step": 5175 }, { "epoch": 29.07887323943662, "grad_norm": 0.45026785135269165, "learning_rate": 1.5518999999999998e-05, "loss": 0.0554, "step": 5176 }, { 
"epoch": 29.08450704225352, "grad_norm": 0.5682210922241211, "learning_rate": 1.5521999999999998e-05, "loss": 0.0416, "step": 5177 }, { "epoch": 29.090140845070422, "grad_norm": 0.49401506781578064, "learning_rate": 1.5524999999999998e-05, "loss": 0.0484, "step": 5178 }, { "epoch": 29.095774647887325, "grad_norm": 0.4680187702178955, "learning_rate": 1.5527999999999998e-05, "loss": 0.0669, "step": 5179 }, { "epoch": 29.101408450704227, "grad_norm": 0.7089043855667114, "learning_rate": 1.5531e-05, "loss": 0.075, "step": 5180 }, { "epoch": 29.107042253521126, "grad_norm": 0.5301341414451599, "learning_rate": 1.5534e-05, "loss": 0.0375, "step": 5181 }, { "epoch": 29.112676056338028, "grad_norm": 0.6662759184837341, "learning_rate": 1.5537e-05, "loss": 0.0451, "step": 5182 }, { "epoch": 29.11830985915493, "grad_norm": 0.9311359524726868, "learning_rate": 1.554e-05, "loss": 0.0572, "step": 5183 }, { "epoch": 29.123943661971833, "grad_norm": 0.6881885528564453, "learning_rate": 1.5543e-05, "loss": 0.0494, "step": 5184 }, { "epoch": 29.12957746478873, "grad_norm": 0.6180036664009094, "learning_rate": 1.5546e-05, "loss": 0.0307, "step": 5185 }, { "epoch": 29.135211267605634, "grad_norm": 0.6315963268280029, "learning_rate": 1.5549e-05, "loss": 0.0463, "step": 5186 }, { "epoch": 29.140845070422536, "grad_norm": 0.44603970646858215, "learning_rate": 1.5552e-05, "loss": 0.0467, "step": 5187 }, { "epoch": 29.146478873239438, "grad_norm": 0.4284903407096863, "learning_rate": 1.5555e-05, "loss": 0.0307, "step": 5188 }, { "epoch": 29.152112676056337, "grad_norm": 0.8106344938278198, "learning_rate": 1.5558e-05, "loss": 0.0848, "step": 5189 }, { "epoch": 29.15774647887324, "grad_norm": 0.43273743987083435, "learning_rate": 1.5561000000000002e-05, "loss": 0.024, "step": 5190 }, { "epoch": 29.16338028169014, "grad_norm": 0.4170507788658142, "learning_rate": 1.5564000000000002e-05, "loss": 0.028, "step": 5191 }, { "epoch": 29.169014084507044, "grad_norm": 0.4646318256855011, 
"learning_rate": 1.5567000000000002e-05, "loss": 0.0442, "step": 5192 }, { "epoch": 29.174647887323943, "grad_norm": 0.38459670543670654, "learning_rate": 1.5570000000000002e-05, "loss": 0.0229, "step": 5193 }, { "epoch": 29.180281690140845, "grad_norm": 0.8296533823013306, "learning_rate": 1.5573e-05, "loss": 0.0481, "step": 5194 }, { "epoch": 29.185915492957747, "grad_norm": 0.9250654578208923, "learning_rate": 1.5576e-05, "loss": 0.0403, "step": 5195 }, { "epoch": 29.19154929577465, "grad_norm": 0.5740807056427002, "learning_rate": 1.5579e-05, "loss": 0.0754, "step": 5196 }, { "epoch": 29.197183098591548, "grad_norm": 1.13681960105896, "learning_rate": 1.5582e-05, "loss": 0.0532, "step": 5197 }, { "epoch": 29.20281690140845, "grad_norm": 0.46878781914711, "learning_rate": 1.5585e-05, "loss": 0.0266, "step": 5198 }, { "epoch": 29.208450704225353, "grad_norm": 0.7484666109085083, "learning_rate": 1.5588e-05, "loss": 0.0327, "step": 5199 }, { "epoch": 29.214084507042255, "grad_norm": 0.49039265513420105, "learning_rate": 1.5591e-05, "loss": 0.0183, "step": 5200 }, { "epoch": 29.219718309859154, "grad_norm": 0.4683140218257904, "learning_rate": 1.5594e-05, "loss": 0.0359, "step": 5201 }, { "epoch": 29.225352112676056, "grad_norm": 0.5157768130302429, "learning_rate": 1.5597e-05, "loss": 0.0294, "step": 5202 }, { "epoch": 29.23098591549296, "grad_norm": 0.584134042263031, "learning_rate": 1.56e-05, "loss": 0.0212, "step": 5203 }, { "epoch": 29.23661971830986, "grad_norm": 1.7785794734954834, "learning_rate": 1.5603e-05, "loss": 0.015, "step": 5204 }, { "epoch": 29.24225352112676, "grad_norm": 0.5211226940155029, "learning_rate": 1.5606e-05, "loss": 0.0234, "step": 5205 }, { "epoch": 29.24788732394366, "grad_norm": 0.7227548360824585, "learning_rate": 1.5609e-05, "loss": 0.0272, "step": 5206 }, { "epoch": 29.253521126760564, "grad_norm": 0.7595842480659485, "learning_rate": 1.5612e-05, "loss": 0.17, "step": 5207 }, { "epoch": 29.259154929577466, "grad_norm": 
0.5374959111213684, "learning_rate": 1.5615e-05, "loss": 0.0872, "step": 5208 }, { "epoch": 29.264788732394365, "grad_norm": 0.710433840751648, "learning_rate": 1.5618e-05, "loss": 0.1071, "step": 5209 }, { "epoch": 29.270422535211267, "grad_norm": 0.5820371508598328, "learning_rate": 1.5621000000000002e-05, "loss": 0.0917, "step": 5210 }, { "epoch": 29.27605633802817, "grad_norm": 0.5214697122573853, "learning_rate": 1.5624e-05, "loss": 0.0771, "step": 5211 }, { "epoch": 29.281690140845072, "grad_norm": 0.6296930313110352, "learning_rate": 1.5627e-05, "loss": 0.1274, "step": 5212 }, { "epoch": 29.28732394366197, "grad_norm": 0.8432474732398987, "learning_rate": 1.563e-05, "loss": 0.0966, "step": 5213 }, { "epoch": 29.292957746478873, "grad_norm": 0.6227443814277649, "learning_rate": 1.5633e-05, "loss": 0.0862, "step": 5214 }, { "epoch": 29.298591549295775, "grad_norm": 0.5820328593254089, "learning_rate": 1.5636e-05, "loss": 0.1261, "step": 5215 }, { "epoch": 29.304225352112677, "grad_norm": 0.557923436164856, "learning_rate": 1.5639e-05, "loss": 0.0683, "step": 5216 }, { "epoch": 29.309859154929576, "grad_norm": 0.6467063426971436, "learning_rate": 1.5642e-05, "loss": 0.0953, "step": 5217 }, { "epoch": 29.31549295774648, "grad_norm": 0.5140814185142517, "learning_rate": 1.5645e-05, "loss": 0.0729, "step": 5218 }, { "epoch": 29.32112676056338, "grad_norm": 0.5775060057640076, "learning_rate": 1.5648e-05, "loss": 0.0772, "step": 5219 }, { "epoch": 29.326760563380283, "grad_norm": 0.6231495141983032, "learning_rate": 1.5651000000000003e-05, "loss": 0.1733, "step": 5220 }, { "epoch": 29.33239436619718, "grad_norm": 0.5065089464187622, "learning_rate": 1.5654000000000003e-05, "loss": 0.0502, "step": 5221 }, { "epoch": 29.338028169014084, "grad_norm": 1.0542914867401123, "learning_rate": 1.5657000000000003e-05, "loss": 0.0769, "step": 5222 }, { "epoch": 29.343661971830986, "grad_norm": 0.3842698037624359, "learning_rate": 1.5660000000000003e-05, "loss": 0.039, "step": 
5223 }, { "epoch": 29.34929577464789, "grad_norm": 0.6025784611701965, "learning_rate": 1.5663000000000002e-05, "loss": 0.061, "step": 5224 }, { "epoch": 29.354929577464787, "grad_norm": 0.6463728547096252, "learning_rate": 1.5666e-05, "loss": 0.0608, "step": 5225 }, { "epoch": 29.36056338028169, "grad_norm": 0.5309611558914185, "learning_rate": 1.5669e-05, "loss": 0.0536, "step": 5226 }, { "epoch": 29.366197183098592, "grad_norm": 0.7551985383033752, "learning_rate": 1.5672e-05, "loss": 0.0539, "step": 5227 }, { "epoch": 29.371830985915494, "grad_norm": 0.9451178312301636, "learning_rate": 1.5674999999999998e-05, "loss": 0.0424, "step": 5228 }, { "epoch": 29.377464788732393, "grad_norm": 0.5031388401985168, "learning_rate": 1.5677999999999998e-05, "loss": 0.083, "step": 5229 }, { "epoch": 29.383098591549295, "grad_norm": 0.8105326294898987, "learning_rate": 1.5681e-05, "loss": 0.0352, "step": 5230 }, { "epoch": 29.388732394366198, "grad_norm": 0.5598998069763184, "learning_rate": 1.5684e-05, "loss": 0.041, "step": 5231 }, { "epoch": 29.3943661971831, "grad_norm": 0.6031084656715393, "learning_rate": 1.5687e-05, "loss": 0.0542, "step": 5232 }, { "epoch": 29.4, "grad_norm": 0.4669267535209656, "learning_rate": 1.569e-05, "loss": 0.0445, "step": 5233 }, { "epoch": 29.4056338028169, "grad_norm": 0.48490434885025024, "learning_rate": 1.5693e-05, "loss": 0.0388, "step": 5234 }, { "epoch": 29.411267605633803, "grad_norm": 0.4290199875831604, "learning_rate": 1.5696e-05, "loss": 0.0306, "step": 5235 }, { "epoch": 29.416901408450705, "grad_norm": 0.5076020956039429, "learning_rate": 1.5699e-05, "loss": 0.0687, "step": 5236 }, { "epoch": 29.422535211267604, "grad_norm": 0.6565372943878174, "learning_rate": 1.5702e-05, "loss": 0.0347, "step": 5237 }, { "epoch": 29.428169014084506, "grad_norm": 0.8009961247444153, "learning_rate": 1.5705e-05, "loss": 0.0668, "step": 5238 }, { "epoch": 29.43380281690141, "grad_norm": 0.5368661284446716, "learning_rate": 1.5708e-05, "loss": 
0.0593, "step": 5239 }, { "epoch": 29.43943661971831, "grad_norm": 0.7338910102844238, "learning_rate": 1.5711000000000003e-05, "loss": 0.0938, "step": 5240 }, { "epoch": 29.44507042253521, "grad_norm": 0.5612071752548218, "learning_rate": 1.5714000000000002e-05, "loss": 0.0775, "step": 5241 }, { "epoch": 29.450704225352112, "grad_norm": 0.6637964248657227, "learning_rate": 1.5717000000000002e-05, "loss": 0.0261, "step": 5242 }, { "epoch": 29.456338028169014, "grad_norm": 1.5263164043426514, "learning_rate": 1.5720000000000002e-05, "loss": 0.0688, "step": 5243 }, { "epoch": 29.461971830985917, "grad_norm": 0.504317045211792, "learning_rate": 1.5723000000000002e-05, "loss": 0.0366, "step": 5244 }, { "epoch": 29.467605633802815, "grad_norm": 0.6816652417182922, "learning_rate": 1.5726e-05, "loss": 0.0447, "step": 5245 }, { "epoch": 29.473239436619718, "grad_norm": 0.6391515731811523, "learning_rate": 1.5729e-05, "loss": 0.0826, "step": 5246 }, { "epoch": 29.47887323943662, "grad_norm": 1.5434987545013428, "learning_rate": 1.5732e-05, "loss": 0.0291, "step": 5247 }, { "epoch": 29.484507042253522, "grad_norm": 0.7233259081840515, "learning_rate": 1.5735e-05, "loss": 0.0448, "step": 5248 }, { "epoch": 29.49014084507042, "grad_norm": 0.5765902400016785, "learning_rate": 1.5737999999999997e-05, "loss": 0.0644, "step": 5249 }, { "epoch": 29.495774647887323, "grad_norm": 0.43511083722114563, "learning_rate": 1.5741e-05, "loss": 0.0361, "step": 5250 }, { "epoch": 29.501408450704226, "grad_norm": 0.8154364228248596, "learning_rate": 1.5744e-05, "loss": 0.168, "step": 5251 }, { "epoch": 29.507042253521128, "grad_norm": 0.7502539753913879, "learning_rate": 1.5747e-05, "loss": 0.13, "step": 5252 }, { "epoch": 29.512676056338027, "grad_norm": 0.506389856338501, "learning_rate": 1.575e-05, "loss": 0.1, "step": 5253 }, { "epoch": 29.51830985915493, "grad_norm": 0.5501558780670166, "learning_rate": 1.5753e-05, "loss": 0.1044, "step": 5254 }, { "epoch": 29.52394366197183, 
"grad_norm": 0.68449866771698, "learning_rate": 1.5756e-05, "loss": 0.0935, "step": 5255 }, { "epoch": 29.529577464788733, "grad_norm": 0.8795751333236694, "learning_rate": 1.5759e-05, "loss": 0.1284, "step": 5256 }, { "epoch": 29.535211267605632, "grad_norm": 0.6177387833595276, "learning_rate": 1.5762e-05, "loss": 0.0943, "step": 5257 }, { "epoch": 29.540845070422534, "grad_norm": 0.756565511226654, "learning_rate": 1.5765e-05, "loss": 0.1025, "step": 5258 }, { "epoch": 29.546478873239437, "grad_norm": 0.6733160018920898, "learning_rate": 1.5768e-05, "loss": 0.0711, "step": 5259 }, { "epoch": 29.55211267605634, "grad_norm": 0.711150586605072, "learning_rate": 1.5771e-05, "loss": 0.103, "step": 5260 }, { "epoch": 29.557746478873238, "grad_norm": 0.5498871207237244, "learning_rate": 1.5774000000000002e-05, "loss": 0.0718, "step": 5261 }, { "epoch": 29.56338028169014, "grad_norm": 0.5422618389129639, "learning_rate": 1.5777e-05, "loss": 0.0704, "step": 5262 }, { "epoch": 29.569014084507042, "grad_norm": 0.6359525918960571, "learning_rate": 1.578e-05, "loss": 0.0719, "step": 5263 }, { "epoch": 29.574647887323945, "grad_norm": 0.574178159236908, "learning_rate": 1.5783e-05, "loss": 0.0757, "step": 5264 }, { "epoch": 29.580281690140843, "grad_norm": 0.6868993043899536, "learning_rate": 1.5786e-05, "loss": 0.1435, "step": 5265 }, { "epoch": 29.585915492957746, "grad_norm": 0.7080891728401184, "learning_rate": 1.5789e-05, "loss": 0.0841, "step": 5266 }, { "epoch": 29.591549295774648, "grad_norm": 0.5112836956977844, "learning_rate": 1.5792e-05, "loss": 0.044, "step": 5267 }, { "epoch": 29.59718309859155, "grad_norm": 0.8912848234176636, "learning_rate": 1.5795e-05, "loss": 0.1019, "step": 5268 }, { "epoch": 29.60281690140845, "grad_norm": 0.47767528891563416, "learning_rate": 1.5798e-05, "loss": 0.0426, "step": 5269 }, { "epoch": 29.60845070422535, "grad_norm": 0.6725708246231079, "learning_rate": 1.5801e-05, "loss": 0.0756, "step": 5270 }, { "epoch": 29.614084507042254, 
"grad_norm": 0.4701225757598877, "learning_rate": 1.5804000000000003e-05, "loss": 0.0655, "step": 5271 }, { "epoch": 29.619718309859156, "grad_norm": 0.7608067393302917, "learning_rate": 1.5807000000000003e-05, "loss": 0.0415, "step": 5272 }, { "epoch": 29.625352112676055, "grad_norm": 0.46251076459884644, "learning_rate": 1.5810000000000003e-05, "loss": 0.0697, "step": 5273 }, { "epoch": 29.630985915492957, "grad_norm": 0.6789788007736206, "learning_rate": 1.5813e-05, "loss": 0.0424, "step": 5274 }, { "epoch": 29.63661971830986, "grad_norm": 0.6965165138244629, "learning_rate": 1.5816e-05, "loss": 0.0634, "step": 5275 }, { "epoch": 29.64225352112676, "grad_norm": 0.4534487724304199, "learning_rate": 1.5819e-05, "loss": 0.0366, "step": 5276 }, { "epoch": 29.647887323943664, "grad_norm": 0.5228815078735352, "learning_rate": 1.5822e-05, "loss": 0.0434, "step": 5277 }, { "epoch": 29.653521126760563, "grad_norm": 0.5242321491241455, "learning_rate": 1.5825e-05, "loss": 0.0333, "step": 5278 }, { "epoch": 29.659154929577465, "grad_norm": 0.4653126001358032, "learning_rate": 1.5827999999999998e-05, "loss": 0.0452, "step": 5279 }, { "epoch": 29.664788732394367, "grad_norm": 0.6806649565696716, "learning_rate": 1.5830999999999998e-05, "loss": 0.0423, "step": 5280 }, { "epoch": 29.670422535211266, "grad_norm": 0.5749784111976624, "learning_rate": 1.5834e-05, "loss": 0.048, "step": 5281 }, { "epoch": 29.676056338028168, "grad_norm": 0.7563560009002686, "learning_rate": 1.5837e-05, "loss": 0.0821, "step": 5282 }, { "epoch": 29.68169014084507, "grad_norm": 0.5637146234512329, "learning_rate": 1.584e-05, "loss": 0.0376, "step": 5283 }, { "epoch": 29.687323943661973, "grad_norm": 0.32545408606529236, "learning_rate": 1.5843e-05, "loss": 0.0179, "step": 5284 }, { "epoch": 29.692957746478875, "grad_norm": 0.4282184839248657, "learning_rate": 1.5846e-05, "loss": 0.0191, "step": 5285 }, { "epoch": 29.698591549295774, "grad_norm": 0.570502758026123, "learning_rate": 1.5849e-05, 
"loss": 0.0407, "step": 5286 }, { "epoch": 29.704225352112676, "grad_norm": 0.5747480392456055, "learning_rate": 1.5852e-05, "loss": 0.0482, "step": 5287 }, { "epoch": 29.70985915492958, "grad_norm": 0.5830278396606445, "learning_rate": 1.5855e-05, "loss": 0.0466, "step": 5288 }, { "epoch": 29.71549295774648, "grad_norm": 0.4313574731349945, "learning_rate": 1.5858e-05, "loss": 0.0212, "step": 5289 }, { "epoch": 29.72112676056338, "grad_norm": 0.43484535813331604, "learning_rate": 1.5861e-05, "loss": 0.034, "step": 5290 }, { "epoch": 29.72676056338028, "grad_norm": 0.5468824505805969, "learning_rate": 1.5864000000000003e-05, "loss": 0.0162, "step": 5291 }, { "epoch": 29.732394366197184, "grad_norm": 1.8178751468658447, "learning_rate": 1.5867000000000002e-05, "loss": 0.0444, "step": 5292 }, { "epoch": 29.738028169014086, "grad_norm": 0.7535366415977478, "learning_rate": 1.5870000000000002e-05, "loss": 0.0239, "step": 5293 }, { "epoch": 29.743661971830985, "grad_norm": 0.976398766040802, "learning_rate": 1.5873000000000002e-05, "loss": 0.0314, "step": 5294 }, { "epoch": 29.749295774647887, "grad_norm": 1.0149894952774048, "learning_rate": 1.5876000000000002e-05, "loss": 0.1877, "step": 5295 }, { "epoch": 29.75492957746479, "grad_norm": 1.040798544883728, "learning_rate": 1.5879e-05, "loss": 0.1849, "step": 5296 }, { "epoch": 29.760563380281692, "grad_norm": 0.8907865285873413, "learning_rate": 1.5882e-05, "loss": 0.1429, "step": 5297 }, { "epoch": 29.76619718309859, "grad_norm": 0.553267240524292, "learning_rate": 1.5884999999999998e-05, "loss": 0.11, "step": 5298 }, { "epoch": 29.771830985915493, "grad_norm": 0.7970935702323914, "learning_rate": 1.5887999999999998e-05, "loss": 0.1718, "step": 5299 }, { "epoch": 29.777464788732395, "grad_norm": 0.5762118101119995, "learning_rate": 1.5890999999999997e-05, "loss": 0.095, "step": 5300 }, { "epoch": 29.783098591549297, "grad_norm": 0.6505810022354126, "learning_rate": 1.5894e-05, "loss": 0.0959, "step": 5301 }, { 
"epoch": 29.788732394366196, "grad_norm": 0.88006991147995, "learning_rate": 1.5897e-05, "loss": 0.1353, "step": 5302 }, { "epoch": 29.7943661971831, "grad_norm": 0.5859400033950806, "learning_rate": 1.59e-05, "loss": 0.1429, "step": 5303 }, { "epoch": 29.8, "grad_norm": 0.7552185654640198, "learning_rate": 1.5903e-05, "loss": 0.0675, "step": 5304 }, { "epoch": 29.805633802816903, "grad_norm": 0.5829064249992371, "learning_rate": 1.5906e-05, "loss": 0.0665, "step": 5305 }, { "epoch": 29.8112676056338, "grad_norm": 0.7265884280204773, "learning_rate": 1.5909e-05, "loss": 0.0969, "step": 5306 }, { "epoch": 29.816901408450704, "grad_norm": 0.5321835875511169, "learning_rate": 1.5912e-05, "loss": 0.0644, "step": 5307 }, { "epoch": 29.822535211267606, "grad_norm": 0.6013337969779968, "learning_rate": 1.5915e-05, "loss": 0.1034, "step": 5308 }, { "epoch": 29.82816901408451, "grad_norm": 0.5811848640441895, "learning_rate": 1.5918e-05, "loss": 0.0581, "step": 5309 }, { "epoch": 29.833802816901407, "grad_norm": 0.6387689113616943, "learning_rate": 1.5921e-05, "loss": 0.0784, "step": 5310 }, { "epoch": 29.83943661971831, "grad_norm": 0.5201472043991089, "learning_rate": 1.5924000000000002e-05, "loss": 0.0862, "step": 5311 }, { "epoch": 29.845070422535212, "grad_norm": 0.5194034576416016, "learning_rate": 1.5927000000000002e-05, "loss": 0.0348, "step": 5312 }, { "epoch": 29.850704225352114, "grad_norm": 0.5872905850410461, "learning_rate": 1.593e-05, "loss": 0.0572, "step": 5313 }, { "epoch": 29.856338028169013, "grad_norm": 0.734362006187439, "learning_rate": 1.5933e-05, "loss": 0.0522, "step": 5314 }, { "epoch": 29.861971830985915, "grad_norm": 0.7890540361404419, "learning_rate": 1.5936e-05, "loss": 0.0345, "step": 5315 }, { "epoch": 29.867605633802818, "grad_norm": 0.4846155047416687, "learning_rate": 1.5939e-05, "loss": 0.0683, "step": 5316 }, { "epoch": 29.87323943661972, "grad_norm": 0.46160247921943665, "learning_rate": 1.5942e-05, "loss": 0.0504, "step": 5317 }, { 
"epoch": 29.87887323943662, "grad_norm": 0.5353713631629944, "learning_rate": 1.5945e-05, "loss": 0.0457, "step": 5318 }, { "epoch": 29.88450704225352, "grad_norm": 0.4202885329723358, "learning_rate": 1.5948e-05, "loss": 0.0281, "step": 5319 }, { "epoch": 29.890140845070423, "grad_norm": 0.5654758810997009, "learning_rate": 1.5951e-05, "loss": 0.0432, "step": 5320 }, { "epoch": 29.895774647887325, "grad_norm": 0.6866729259490967, "learning_rate": 1.5954000000000003e-05, "loss": 0.0477, "step": 5321 }, { "epoch": 29.901408450704224, "grad_norm": 0.5644654631614685, "learning_rate": 1.5957000000000003e-05, "loss": 0.0516, "step": 5322 }, { "epoch": 29.907042253521126, "grad_norm": 0.43241775035858154, "learning_rate": 1.596e-05, "loss": 0.0359, "step": 5323 }, { "epoch": 29.91267605633803, "grad_norm": 0.47326958179473877, "learning_rate": 1.5963e-05, "loss": 0.0405, "step": 5324 }, { "epoch": 29.91830985915493, "grad_norm": 0.6474609971046448, "learning_rate": 1.5966e-05, "loss": 0.0387, "step": 5325 }, { "epoch": 29.92394366197183, "grad_norm": 0.7125644683837891, "learning_rate": 1.5969e-05, "loss": 0.0308, "step": 5326 }, { "epoch": 29.929577464788732, "grad_norm": 0.5479835271835327, "learning_rate": 1.5972e-05, "loss": 0.0368, "step": 5327 }, { "epoch": 29.935211267605634, "grad_norm": 0.3807334899902344, "learning_rate": 1.5975e-05, "loss": 0.0273, "step": 5328 }, { "epoch": 29.940845070422537, "grad_norm": 0.4832625687122345, "learning_rate": 1.5978e-05, "loss": 0.0429, "step": 5329 }, { "epoch": 29.946478873239435, "grad_norm": 0.9675916433334351, "learning_rate": 1.5980999999999998e-05, "loss": 0.0399, "step": 5330 }, { "epoch": 29.952112676056338, "grad_norm": 0.5020010471343994, "learning_rate": 1.5984e-05, "loss": 0.0464, "step": 5331 }, { "epoch": 29.95774647887324, "grad_norm": 0.35561344027519226, "learning_rate": 1.5987e-05, "loss": 0.013, "step": 5332 }, { "epoch": 29.963380281690142, "grad_norm": 0.531600296497345, "learning_rate": 1.599e-05, 
"loss": 0.0443, "step": 5333 }, { "epoch": 29.96901408450704, "grad_norm": 0.45576298236846924, "learning_rate": 1.5993e-05, "loss": 0.0484, "step": 5334 }, { "epoch": 29.974647887323943, "grad_norm": 0.5381224751472473, "learning_rate": 1.5996e-05, "loss": 0.0132, "step": 5335 }, { "epoch": 29.980281690140846, "grad_norm": 0.8124315738677979, "learning_rate": 1.5999e-05, "loss": 0.0692, "step": 5336 }, { "epoch": 29.985915492957748, "grad_norm": 0.4162590205669403, "learning_rate": 1.6002e-05, "loss": 0.041, "step": 5337 }, { "epoch": 29.991549295774647, "grad_norm": 0.5987299084663391, "learning_rate": 1.6005e-05, "loss": 0.0456, "step": 5338 }, { "epoch": 29.99718309859155, "grad_norm": 0.6891589164733887, "learning_rate": 1.6008e-05, "loss": 0.0768, "step": 5339 }, { "epoch": 30.0, "grad_norm": 0.5054416656494141, "learning_rate": 1.6011e-05, "loss": 0.0163, "step": 5340 }, { "epoch": 30.005633802816902, "grad_norm": 0.5896918177604675, "learning_rate": 1.6014000000000003e-05, "loss": 0.1466, "step": 5341 }, { "epoch": 30.011267605633805, "grad_norm": 0.7190629243850708, "learning_rate": 1.6017000000000003e-05, "loss": 0.1028, "step": 5342 }, { "epoch": 30.016901408450703, "grad_norm": 0.5966160297393799, "learning_rate": 1.6020000000000002e-05, "loss": 0.0993, "step": 5343 }, { "epoch": 30.022535211267606, "grad_norm": 0.6330658793449402, "learning_rate": 1.6023000000000002e-05, "loss": 0.1521, "step": 5344 }, { "epoch": 30.028169014084508, "grad_norm": 0.6371360421180725, "learning_rate": 1.6026000000000002e-05, "loss": 0.0861, "step": 5345 }, { "epoch": 30.03380281690141, "grad_norm": 0.6190137267112732, "learning_rate": 1.6029000000000002e-05, "loss": 0.0892, "step": 5346 }, { "epoch": 30.03943661971831, "grad_norm": 0.5732072591781616, "learning_rate": 1.6032e-05, "loss": 0.1462, "step": 5347 }, { "epoch": 30.04507042253521, "grad_norm": 0.7658596634864807, "learning_rate": 1.6034999999999998e-05, "loss": 0.1346, "step": 5348 }, { "epoch": 
30.050704225352113, "grad_norm": 0.8525718450546265, "learning_rate": 1.6037999999999998e-05, "loss": 0.0638, "step": 5349 }, { "epoch": 30.056338028169016, "grad_norm": 0.8024362921714783, "learning_rate": 1.6040999999999998e-05, "loss": 0.1304, "step": 5350 }, { "epoch": 30.061971830985915, "grad_norm": 0.5279252529144287, "learning_rate": 1.6044e-05, "loss": 0.0591, "step": 5351 }, { "epoch": 30.067605633802817, "grad_norm": 0.5511128306388855, "learning_rate": 1.6047e-05, "loss": 0.0633, "step": 5352 }, { "epoch": 30.07323943661972, "grad_norm": 0.6188144087791443, "learning_rate": 1.605e-05, "loss": 0.0723, "step": 5353 }, { "epoch": 30.07887323943662, "grad_norm": 0.5734935998916626, "learning_rate": 1.6053e-05, "loss": 0.067, "step": 5354 }, { "epoch": 30.08450704225352, "grad_norm": 0.6437960267066956, "learning_rate": 1.6056e-05, "loss": 0.0639, "step": 5355 }, { "epoch": 30.090140845070422, "grad_norm": 0.5374909043312073, "learning_rate": 1.6059e-05, "loss": 0.0746, "step": 5356 }, { "epoch": 30.095774647887325, "grad_norm": 0.6549772620201111, "learning_rate": 1.6062e-05, "loss": 0.0665, "step": 5357 }, { "epoch": 30.101408450704227, "grad_norm": 0.5450600981712341, "learning_rate": 1.6065e-05, "loss": 0.0465, "step": 5358 }, { "epoch": 30.107042253521126, "grad_norm": 0.5043218731880188, "learning_rate": 1.6068e-05, "loss": 0.047, "step": 5359 }, { "epoch": 30.112676056338028, "grad_norm": 0.3894733190536499, "learning_rate": 1.6071e-05, "loss": 0.0294, "step": 5360 }, { "epoch": 30.11830985915493, "grad_norm": 0.621464192867279, "learning_rate": 1.6074000000000002e-05, "loss": 0.0554, "step": 5361 }, { "epoch": 30.123943661971833, "grad_norm": 0.7767791748046875, "learning_rate": 1.6077000000000002e-05, "loss": 0.0476, "step": 5362 }, { "epoch": 30.12957746478873, "grad_norm": 0.6023777723312378, "learning_rate": 1.6080000000000002e-05, "loss": 0.042, "step": 5363 }, { "epoch": 30.135211267605634, "grad_norm": 0.6250441074371338, "learning_rate": 
1.6083000000000002e-05, "loss": 0.0374, "step": 5364 }, { "epoch": 30.140845070422536, "grad_norm": 0.3595234453678131, "learning_rate": 1.6086e-05, "loss": 0.031, "step": 5365 }, { "epoch": 30.146478873239438, "grad_norm": 0.4065183997154236, "learning_rate": 1.6089e-05, "loss": 0.0374, "step": 5366 }, { "epoch": 30.152112676056337, "grad_norm": 0.5390560030937195, "learning_rate": 1.6092e-05, "loss": 0.0738, "step": 5367 }, { "epoch": 30.15774647887324, "grad_norm": 0.4374127984046936, "learning_rate": 1.6095e-05, "loss": 0.0458, "step": 5368 }, { "epoch": 30.16338028169014, "grad_norm": 0.6200258731842041, "learning_rate": 1.6098e-05, "loss": 0.0268, "step": 5369 }, { "epoch": 30.169014084507044, "grad_norm": 0.6695671081542969, "learning_rate": 1.6101e-05, "loss": 0.0839, "step": 5370 }, { "epoch": 30.174647887323943, "grad_norm": 0.4485051929950714, "learning_rate": 1.6104000000000004e-05, "loss": 0.0308, "step": 5371 }, { "epoch": 30.180281690140845, "grad_norm": 0.39367127418518066, "learning_rate": 1.6107e-05, "loss": 0.0209, "step": 5372 }, { "epoch": 30.185915492957747, "grad_norm": 0.5144534111022949, "learning_rate": 1.611e-05, "loss": 0.0948, "step": 5373 }, { "epoch": 30.19154929577465, "grad_norm": 0.4490613639354706, "learning_rate": 1.6113e-05, "loss": 0.0244, "step": 5374 }, { "epoch": 30.197183098591548, "grad_norm": 0.5823401808738708, "learning_rate": 1.6116e-05, "loss": 0.0555, "step": 5375 }, { "epoch": 30.20281690140845, "grad_norm": 0.4115397334098816, "learning_rate": 1.6119e-05, "loss": 0.0325, "step": 5376 }, { "epoch": 30.208450704225353, "grad_norm": 0.44605621695518494, "learning_rate": 1.6122e-05, "loss": 0.036, "step": 5377 }, { "epoch": 30.214084507042255, "grad_norm": 0.4436214864253998, "learning_rate": 1.6125e-05, "loss": 0.0227, "step": 5378 }, { "epoch": 30.219718309859154, "grad_norm": 0.3625319004058838, "learning_rate": 1.6128e-05, "loss": 0.0147, "step": 5379 }, { "epoch": 30.225352112676056, "grad_norm": 
0.49258244037628174, "learning_rate": 1.6131e-05, "loss": 0.0516, "step": 5380 }, { "epoch": 30.23098591549296, "grad_norm": 0.3849585950374603, "learning_rate": 1.6134e-05, "loss": 0.0163, "step": 5381 }, { "epoch": 30.23661971830986, "grad_norm": 0.6748790144920349, "learning_rate": 1.6137e-05, "loss": 0.1053, "step": 5382 }, { "epoch": 30.24225352112676, "grad_norm": 0.38485950231552124, "learning_rate": 1.614e-05, "loss": 0.0093, "step": 5383 }, { "epoch": 30.24788732394366, "grad_norm": 0.7366259694099426, "learning_rate": 1.6143e-05, "loss": 0.0166, "step": 5384 }, { "epoch": 30.253521126760564, "grad_norm": 0.8154985308647156, "learning_rate": 1.6146e-05, "loss": 0.1663, "step": 5385 }, { "epoch": 30.259154929577466, "grad_norm": 0.6390140056610107, "learning_rate": 1.6149e-05, "loss": 0.0896, "step": 5386 }, { "epoch": 30.264788732394365, "grad_norm": 0.7036663889884949, "learning_rate": 1.6152e-05, "loss": 0.102, "step": 5387 }, { "epoch": 30.270422535211267, "grad_norm": 0.5469638705253601, "learning_rate": 1.6155e-05, "loss": 0.096, "step": 5388 }, { "epoch": 30.27605633802817, "grad_norm": 0.5595445036888123, "learning_rate": 1.6158e-05, "loss": 0.0902, "step": 5389 }, { "epoch": 30.281690140845072, "grad_norm": 0.5869790315628052, "learning_rate": 1.6161e-05, "loss": 0.0565, "step": 5390 }, { "epoch": 30.28732394366197, "grad_norm": 0.5362108945846558, "learning_rate": 1.6164e-05, "loss": 0.0814, "step": 5391 }, { "epoch": 30.292957746478873, "grad_norm": 0.5522419810295105, "learning_rate": 1.6167000000000003e-05, "loss": 0.082, "step": 5392 }, { "epoch": 30.298591549295775, "grad_norm": 0.627889096736908, "learning_rate": 1.6170000000000003e-05, "loss": 0.0672, "step": 5393 }, { "epoch": 30.304225352112677, "grad_norm": 0.6336875557899475, "learning_rate": 1.6173000000000003e-05, "loss": 0.1089, "step": 5394 }, { "epoch": 30.309859154929576, "grad_norm": 0.46705207228660583, "learning_rate": 1.6176000000000002e-05, "loss": 0.0573, "step": 5395 }, { 
"epoch": 30.31549295774648, "grad_norm": 0.7780168652534485, "learning_rate": 1.6179000000000002e-05, "loss": 0.0781, "step": 5396 }, { "epoch": 30.32112676056338, "grad_norm": 0.48164382576942444, "learning_rate": 1.6182e-05, "loss": 0.1219, "step": 5397 }, { "epoch": 30.326760563380283, "grad_norm": 0.548342764377594, "learning_rate": 1.6185e-05, "loss": 0.0437, "step": 5398 }, { "epoch": 30.33239436619718, "grad_norm": 0.7448260188102722, "learning_rate": 1.6187999999999998e-05, "loss": 0.0669, "step": 5399 }, { "epoch": 30.338028169014084, "grad_norm": 0.38380417227745056, "learning_rate": 1.6190999999999998e-05, "loss": 0.0368, "step": 5400 }, { "epoch": 30.343661971830986, "grad_norm": 0.5380787253379822, "learning_rate": 1.6193999999999998e-05, "loss": 0.0583, "step": 5401 }, { "epoch": 30.34929577464789, "grad_norm": 0.5351355075836182, "learning_rate": 1.6197e-05, "loss": 0.0685, "step": 5402 }, { "epoch": 30.354929577464787, "grad_norm": 0.7340309619903564, "learning_rate": 1.62e-05, "loss": 0.0502, "step": 5403 }, { "epoch": 30.36056338028169, "grad_norm": 0.5732718706130981, "learning_rate": 1.6203e-05, "loss": 0.0677, "step": 5404 }, { "epoch": 30.366197183098592, "grad_norm": 0.7278108596801758, "learning_rate": 1.6206e-05, "loss": 0.068, "step": 5405 }, { "epoch": 30.371830985915494, "grad_norm": 0.6379052996635437, "learning_rate": 1.6209e-05, "loss": 0.0559, "step": 5406 }, { "epoch": 30.377464788732393, "grad_norm": 0.46398764848709106, "learning_rate": 1.6212e-05, "loss": 0.0845, "step": 5407 }, { "epoch": 30.383098591549295, "grad_norm": 0.470272958278656, "learning_rate": 1.6215e-05, "loss": 0.0276, "step": 5408 }, { "epoch": 30.388732394366198, "grad_norm": 0.5150623321533203, "learning_rate": 1.6218e-05, "loss": 0.0748, "step": 5409 }, { "epoch": 30.3943661971831, "grad_norm": 0.5461838841438293, "learning_rate": 1.6221e-05, "loss": 0.0547, "step": 5410 }, { "epoch": 30.4, "grad_norm": 0.42636141180992126, "learning_rate": 1.6224e-05, "loss": 
0.0329, "step": 5411 }, { "epoch": 30.4056338028169, "grad_norm": 0.4465949535369873, "learning_rate": 1.6227000000000002e-05, "loss": 0.027, "step": 5412 }, { "epoch": 30.411267605633803, "grad_norm": 0.4395632743835449, "learning_rate": 1.6230000000000002e-05, "loss": 0.0364, "step": 5413 }, { "epoch": 30.416901408450705, "grad_norm": 0.426990270614624, "learning_rate": 1.6233000000000002e-05, "loss": 0.0289, "step": 5414 }, { "epoch": 30.422535211267604, "grad_norm": 0.433041512966156, "learning_rate": 1.6236000000000002e-05, "loss": 0.0234, "step": 5415 }, { "epoch": 30.428169014084506, "grad_norm": 0.4212591052055359, "learning_rate": 1.6239e-05, "loss": 0.0319, "step": 5416 }, { "epoch": 30.43380281690141, "grad_norm": 0.8807299137115479, "learning_rate": 1.6242e-05, "loss": 0.0421, "step": 5417 }, { "epoch": 30.43943661971831, "grad_norm": 0.44961339235305786, "learning_rate": 1.6245e-05, "loss": 0.0392, "step": 5418 }, { "epoch": 30.44507042253521, "grad_norm": 2.552119493484497, "learning_rate": 1.6248e-05, "loss": 0.0617, "step": 5419 }, { "epoch": 30.450704225352112, "grad_norm": 0.5384565591812134, "learning_rate": 1.6251e-05, "loss": 0.0462, "step": 5420 }, { "epoch": 30.456338028169014, "grad_norm": 0.3396431803703308, "learning_rate": 1.6253999999999997e-05, "loss": 0.0454, "step": 5421 }, { "epoch": 30.461971830985917, "grad_norm": 0.39261817932128906, "learning_rate": 1.6257e-05, "loss": 0.0509, "step": 5422 }, { "epoch": 30.467605633802815, "grad_norm": 0.5793818235397339, "learning_rate": 1.626e-05, "loss": 0.0559, "step": 5423 }, { "epoch": 30.473239436619718, "grad_norm": 0.5862243175506592, "learning_rate": 1.6263e-05, "loss": 0.0591, "step": 5424 }, { "epoch": 30.47887323943662, "grad_norm": 0.536348819732666, "learning_rate": 1.6266e-05, "loss": 0.0373, "step": 5425 }, { "epoch": 30.484507042253522, "grad_norm": 0.34656569361686707, "learning_rate": 1.6269e-05, "loss": 0.0175, "step": 5426 }, { "epoch": 30.49014084507042, "grad_norm": 
0.6657776236534119, "learning_rate": 1.6272e-05, "loss": 0.0225, "step": 5427 }, { "epoch": 30.495774647887323, "grad_norm": 0.4389156103134155, "learning_rate": 1.6275e-05, "loss": 0.0226, "step": 5428 }, { "epoch": 30.501408450704226, "grad_norm": 1.406935214996338, "learning_rate": 1.6278e-05, "loss": 0.1547, "step": 5429 }, { "epoch": 30.507042253521128, "grad_norm": 0.7375538945198059, "learning_rate": 1.6281e-05, "loss": 0.1941, "step": 5430 }, { "epoch": 30.512676056338027, "grad_norm": 0.6257545948028564, "learning_rate": 1.6284e-05, "loss": 0.1065, "step": 5431 }, { "epoch": 30.51830985915493, "grad_norm": 0.7130341529846191, "learning_rate": 1.6287000000000002e-05, "loss": 0.1315, "step": 5432 }, { "epoch": 30.52394366197183, "grad_norm": 0.9262415766716003, "learning_rate": 1.629e-05, "loss": 0.1591, "step": 5433 }, { "epoch": 30.529577464788733, "grad_norm": 0.8354769945144653, "learning_rate": 1.6293e-05, "loss": 0.1129, "step": 5434 }, { "epoch": 30.535211267605632, "grad_norm": 0.6109352111816406, "learning_rate": 1.6296e-05, "loss": 0.0856, "step": 5435 }, { "epoch": 30.540845070422534, "grad_norm": 0.6286054849624634, "learning_rate": 1.6299e-05, "loss": 0.0777, "step": 5436 }, { "epoch": 30.546478873239437, "grad_norm": 0.6884949207305908, "learning_rate": 1.6302e-05, "loss": 0.1401, "step": 5437 }, { "epoch": 30.55211267605634, "grad_norm": 0.5786027908325195, "learning_rate": 1.6305e-05, "loss": 0.0584, "step": 5438 }, { "epoch": 30.557746478873238, "grad_norm": 0.5973263382911682, "learning_rate": 1.6308e-05, "loss": 0.0661, "step": 5439 }, { "epoch": 30.56338028169014, "grad_norm": 0.5557727813720703, "learning_rate": 1.6311e-05, "loss": 0.0617, "step": 5440 }, { "epoch": 30.569014084507042, "grad_norm": 0.635882556438446, "learning_rate": 1.6314e-05, "loss": 0.0894, "step": 5441 }, { "epoch": 30.574647887323945, "grad_norm": 0.6141785383224487, "learning_rate": 1.6317000000000003e-05, "loss": 0.1076, "step": 5442 }, { "epoch": 
30.580281690140843, "grad_norm": 0.4961663782596588, "learning_rate": 1.6320000000000003e-05, "loss": 0.0517, "step": 5443 }, { "epoch": 30.585915492957746, "grad_norm": 1.0339906215667725, "learning_rate": 1.6323000000000003e-05, "loss": 0.0999, "step": 5444 }, { "epoch": 30.591549295774648, "grad_norm": 0.6006290316581726, "learning_rate": 1.6326000000000003e-05, "loss": 0.0966, "step": 5445 }, { "epoch": 30.59718309859155, "grad_norm": 0.8180571794509888, "learning_rate": 1.6329e-05, "loss": 0.0498, "step": 5446 }, { "epoch": 30.60281690140845, "grad_norm": 0.5580424666404724, "learning_rate": 1.6332e-05, "loss": 0.0441, "step": 5447 }, { "epoch": 30.60845070422535, "grad_norm": 0.4199526607990265, "learning_rate": 1.6335e-05, "loss": 0.0352, "step": 5448 }, { "epoch": 30.614084507042254, "grad_norm": 0.5443974137306213, "learning_rate": 1.6338e-05, "loss": 0.0468, "step": 5449 }, { "epoch": 30.619718309859156, "grad_norm": 0.7029668092727661, "learning_rate": 1.6340999999999998e-05, "loss": 0.0861, "step": 5450 }, { "epoch": 30.625352112676055, "grad_norm": 0.6604536175727844, "learning_rate": 1.6343999999999998e-05, "loss": 0.0492, "step": 5451 }, { "epoch": 30.630985915492957, "grad_norm": 1.1525356769561768, "learning_rate": 1.6347e-05, "loss": 0.0411, "step": 5452 }, { "epoch": 30.63661971830986, "grad_norm": 0.49697691202163696, "learning_rate": 1.635e-05, "loss": 0.0277, "step": 5453 }, { "epoch": 30.64225352112676, "grad_norm": 0.44127506017684937, "learning_rate": 1.6353e-05, "loss": 0.0265, "step": 5454 }, { "epoch": 30.647887323943664, "grad_norm": 0.6573490500450134, "learning_rate": 1.6356e-05, "loss": 0.0407, "step": 5455 }, { "epoch": 30.653521126760563, "grad_norm": 0.5367727875709534, "learning_rate": 1.6359e-05, "loss": 0.0387, "step": 5456 }, { "epoch": 30.659154929577465, "grad_norm": 0.7224960327148438, "learning_rate": 1.6362e-05, "loss": 0.0385, "step": 5457 }, { "epoch": 30.664788732394367, "grad_norm": 0.5780273079872131, 
"learning_rate": 1.6365e-05, "loss": 0.0417, "step": 5458 }, { "epoch": 30.670422535211266, "grad_norm": 0.5854515433311462, "learning_rate": 1.6368e-05, "loss": 0.0383, "step": 5459 }, { "epoch": 30.676056338028168, "grad_norm": 0.5561653971672058, "learning_rate": 1.6371e-05, "loss": 0.0198, "step": 5460 }, { "epoch": 30.68169014084507, "grad_norm": 0.44728657603263855, "learning_rate": 1.6374e-05, "loss": 0.0334, "step": 5461 }, { "epoch": 30.687323943661973, "grad_norm": 1.050407886505127, "learning_rate": 1.6377000000000003e-05, "loss": 0.0314, "step": 5462 }, { "epoch": 30.692957746478875, "grad_norm": 0.8696084022521973, "learning_rate": 1.6380000000000002e-05, "loss": 0.0433, "step": 5463 }, { "epoch": 30.698591549295774, "grad_norm": 0.7645071148872375, "learning_rate": 1.6383000000000002e-05, "loss": 0.0334, "step": 5464 }, { "epoch": 30.704225352112676, "grad_norm": 0.5855281949043274, "learning_rate": 1.6386000000000002e-05, "loss": 0.0343, "step": 5465 }, { "epoch": 30.70985915492958, "grad_norm": 0.40521085262298584, "learning_rate": 1.6389000000000002e-05, "loss": 0.0174, "step": 5466 }, { "epoch": 30.71549295774648, "grad_norm": 0.4569852948188782, "learning_rate": 1.6392e-05, "loss": 0.0233, "step": 5467 }, { "epoch": 30.72112676056338, "grad_norm": 1.0660085678100586, "learning_rate": 1.6395e-05, "loss": 0.0527, "step": 5468 }, { "epoch": 30.72676056338028, "grad_norm": 0.4140886664390564, "learning_rate": 1.6398e-05, "loss": 0.0175, "step": 5469 }, { "epoch": 30.732394366197184, "grad_norm": 0.47902563214302063, "learning_rate": 1.6400999999999998e-05, "loss": 0.0202, "step": 5470 }, { "epoch": 30.738028169014086, "grad_norm": 0.6494336128234863, "learning_rate": 1.6403999999999997e-05, "loss": 0.0315, "step": 5471 }, { "epoch": 30.743661971830985, "grad_norm": 0.5888350009918213, "learning_rate": 1.6407e-05, "loss": 0.0465, "step": 5472 }, { "epoch": 30.749295774647887, "grad_norm": 0.909765899181366, "learning_rate": 1.641e-05, "loss": 0.1777, 
"step": 5473 }, { "epoch": 30.75492957746479, "grad_norm": 0.6216979026794434, "learning_rate": 1.6413e-05, "loss": 0.11, "step": 5474 }, { "epoch": 30.760563380281692, "grad_norm": 0.6225910782814026, "learning_rate": 1.6416e-05, "loss": 0.1305, "step": 5475 }, { "epoch": 30.76619718309859, "grad_norm": 0.8191976547241211, "learning_rate": 1.6419e-05, "loss": 0.0981, "step": 5476 }, { "epoch": 30.771830985915493, "grad_norm": 0.6362526416778564, "learning_rate": 1.6422e-05, "loss": 0.0933, "step": 5477 }, { "epoch": 30.777464788732395, "grad_norm": 0.6285766363143921, "learning_rate": 1.6425e-05, "loss": 0.1327, "step": 5478 }, { "epoch": 30.783098591549297, "grad_norm": 0.9806628823280334, "learning_rate": 1.6428e-05, "loss": 0.1347, "step": 5479 }, { "epoch": 30.788732394366196, "grad_norm": 0.5595918297767639, "learning_rate": 1.6431e-05, "loss": 0.1133, "step": 5480 }, { "epoch": 30.7943661971831, "grad_norm": 0.5241599082946777, "learning_rate": 1.6434e-05, "loss": 0.0716, "step": 5481 }, { "epoch": 30.8, "grad_norm": 0.6550946235656738, "learning_rate": 1.6437000000000002e-05, "loss": 0.0806, "step": 5482 }, { "epoch": 30.805633802816903, "grad_norm": 0.5672236680984497, "learning_rate": 1.6440000000000002e-05, "loss": 0.0646, "step": 5483 }, { "epoch": 30.8112676056338, "grad_norm": 0.8884251117706299, "learning_rate": 1.6443e-05, "loss": 0.0803, "step": 5484 }, { "epoch": 30.816901408450704, "grad_norm": 0.6487343907356262, "learning_rate": 1.6446e-05, "loss": 0.0716, "step": 5485 }, { "epoch": 30.822535211267606, "grad_norm": 0.8901031613349915, "learning_rate": 1.6449e-05, "loss": 0.0887, "step": 5486 }, { "epoch": 30.82816901408451, "grad_norm": 0.6457711458206177, "learning_rate": 1.6452e-05, "loss": 0.0598, "step": 5487 }, { "epoch": 30.833802816901407, "grad_norm": 0.5706002712249756, "learning_rate": 1.6455e-05, "loss": 0.0968, "step": 5488 }, { "epoch": 30.83943661971831, "grad_norm": 0.6922225952148438, "learning_rate": 1.6458e-05, "loss": 0.039, 
"step": 5489 }, { "epoch": 30.845070422535212, "grad_norm": 0.7622706294059753, "learning_rate": 1.6461e-05, "loss": 0.0597, "step": 5490 }, { "epoch": 30.850704225352114, "grad_norm": 0.9649633169174194, "learning_rate": 1.6464e-05, "loss": 0.0736, "step": 5491 }, { "epoch": 30.856338028169013, "grad_norm": 0.42097413539886475, "learning_rate": 1.6467000000000003e-05, "loss": 0.0345, "step": 5492 }, { "epoch": 30.861971830985915, "grad_norm": 0.818750262260437, "learning_rate": 1.6470000000000003e-05, "loss": 0.0329, "step": 5493 }, { "epoch": 30.867605633802818, "grad_norm": 0.6182898283004761, "learning_rate": 1.6473000000000003e-05, "loss": 0.0405, "step": 5494 }, { "epoch": 30.87323943661972, "grad_norm": 0.4412152171134949, "learning_rate": 1.6476e-05, "loss": 0.0495, "step": 5495 }, { "epoch": 30.87887323943662, "grad_norm": 0.43333369493484497, "learning_rate": 1.6479e-05, "loss": 0.0271, "step": 5496 }, { "epoch": 30.88450704225352, "grad_norm": 0.7623053789138794, "learning_rate": 1.6482e-05, "loss": 0.0393, "step": 5497 }, { "epoch": 30.890140845070423, "grad_norm": 0.5976096391677856, "learning_rate": 1.6485e-05, "loss": 0.0384, "step": 5498 }, { "epoch": 30.895774647887325, "grad_norm": 0.649797260761261, "learning_rate": 1.6488e-05, "loss": 0.0393, "step": 5499 }, { "epoch": 30.901408450704224, "grad_norm": 0.43131494522094727, "learning_rate": 1.6491e-05, "loss": 0.0265, "step": 5500 }, { "epoch": 30.907042253521126, "grad_norm": 0.7179993391036987, "learning_rate": 1.6493999999999998e-05, "loss": 0.0401, "step": 5501 }, { "epoch": 30.91267605633803, "grad_norm": 0.6851419806480408, "learning_rate": 1.6497e-05, "loss": 0.075, "step": 5502 }, { "epoch": 30.91830985915493, "grad_norm": 0.7034711241722107, "learning_rate": 1.65e-05, "loss": 0.0273, "step": 5503 }, { "epoch": 30.92394366197183, "grad_norm": 0.8301180601119995, "learning_rate": 1.6503e-05, "loss": 0.1007, "step": 5504 }, { "epoch": 30.929577464788732, "grad_norm": 0.6701732873916626, 
"learning_rate": 1.6506e-05, "loss": 0.1053, "step": 5505 }, { "epoch": 30.935211267605634, "grad_norm": 0.3807532489299774, "learning_rate": 1.6509e-05, "loss": 0.0436, "step": 5506 }, { "epoch": 30.940845070422537, "grad_norm": 0.6487147808074951, "learning_rate": 1.6512e-05, "loss": 0.027, "step": 5507 }, { "epoch": 30.946478873239435, "grad_norm": 0.4529869854450226, "learning_rate": 1.6515e-05, "loss": 0.0277, "step": 5508 }, { "epoch": 30.952112676056338, "grad_norm": 0.4650532305240631, "learning_rate": 1.6518e-05, "loss": 0.056, "step": 5509 }, { "epoch": 30.95774647887324, "grad_norm": 0.5410106182098389, "learning_rate": 1.6521e-05, "loss": 0.0213, "step": 5510 }, { "epoch": 30.963380281690142, "grad_norm": 0.4546552002429962, "learning_rate": 1.6524e-05, "loss": 0.0309, "step": 5511 }, { "epoch": 30.96901408450704, "grad_norm": 0.6738647818565369, "learning_rate": 1.6527e-05, "loss": 0.035, "step": 5512 }, { "epoch": 30.974647887323943, "grad_norm": 0.7651129961013794, "learning_rate": 1.6530000000000003e-05, "loss": 0.0149, "step": 5513 }, { "epoch": 30.980281690140846, "grad_norm": 0.4910483658313751, "learning_rate": 1.6533000000000002e-05, "loss": 0.0299, "step": 5514 }, { "epoch": 30.985915492957748, "grad_norm": 0.6404990553855896, "learning_rate": 1.6536000000000002e-05, "loss": 0.0581, "step": 5515 }, { "epoch": 30.991549295774647, "grad_norm": 0.42010411620140076, "learning_rate": 1.6539000000000002e-05, "loss": 0.0351, "step": 5516 }, { "epoch": 30.99718309859155, "grad_norm": 0.6732563376426697, "learning_rate": 1.6542000000000002e-05, "loss": 0.0803, "step": 5517 }, { "epoch": 31.0, "grad_norm": 0.7815874814987183, "learning_rate": 1.6545e-05, "loss": 0.0351, "step": 5518 }, { "epoch": 31.005633802816902, "grad_norm": 0.6296091079711914, "learning_rate": 1.6548e-05, "loss": 0.1521, "step": 5519 }, { "epoch": 31.011267605633805, "grad_norm": 0.6198773384094238, "learning_rate": 1.6550999999999998e-05, "loss": 0.1239, "step": 5520 }, { "epoch": 
31.016901408450703, "grad_norm": 0.5338298082351685, "learning_rate": 1.6553999999999998e-05, "loss": 0.089, "step": 5521 }, { "epoch": 31.022535211267606, "grad_norm": 0.7487637996673584, "learning_rate": 1.6556999999999998e-05, "loss": 0.12, "step": 5522 }, { "epoch": 31.028169014084508, "grad_norm": 0.5595770478248596, "learning_rate": 1.656e-05, "loss": 0.1227, "step": 5523 }, { "epoch": 31.03380281690141, "grad_norm": 0.6319732069969177, "learning_rate": 1.6563e-05, "loss": 0.0763, "step": 5524 }, { "epoch": 31.03943661971831, "grad_norm": 0.7329538464546204, "learning_rate": 1.6566e-05, "loss": 0.0719, "step": 5525 }, { "epoch": 31.04507042253521, "grad_norm": 0.6386680006980896, "learning_rate": 1.6569e-05, "loss": 0.0685, "step": 5526 }, { "epoch": 31.050704225352113, "grad_norm": 0.7347157001495361, "learning_rate": 1.6572e-05, "loss": 0.1138, "step": 5527 }, { "epoch": 31.056338028169016, "grad_norm": 0.45504555106163025, "learning_rate": 1.6575e-05, "loss": 0.0546, "step": 5528 }, { "epoch": 31.061971830985915, "grad_norm": 0.7058091163635254, "learning_rate": 1.6578e-05, "loss": 0.0817, "step": 5529 }, { "epoch": 31.067605633802817, "grad_norm": 0.5219041705131531, "learning_rate": 1.6581e-05, "loss": 0.0576, "step": 5530 }, { "epoch": 31.07323943661972, "grad_norm": 0.8094112873077393, "learning_rate": 1.6584e-05, "loss": 0.0919, "step": 5531 }, { "epoch": 31.07887323943662, "grad_norm": 0.6002070903778076, "learning_rate": 1.6587e-05, "loss": 0.049, "step": 5532 }, { "epoch": 31.08450704225352, "grad_norm": 0.6108466386795044, "learning_rate": 1.6590000000000002e-05, "loss": 0.0743, "step": 5533 }, { "epoch": 31.090140845070422, "grad_norm": 0.39403384923934937, "learning_rate": 1.6593000000000002e-05, "loss": 0.0459, "step": 5534 }, { "epoch": 31.095774647887325, "grad_norm": 0.506692111492157, "learning_rate": 1.6596000000000002e-05, "loss": 0.0587, "step": 5535 }, { "epoch": 31.101408450704227, "grad_norm": 0.6455670595169067, "learning_rate": 
1.6599e-05, "loss": 0.0754, "step": 5536 }, { "epoch": 31.107042253521126, "grad_norm": 0.45101431012153625, "learning_rate": 1.6602e-05, "loss": 0.0304, "step": 5537 }, { "epoch": 31.112676056338028, "grad_norm": 0.5887389779090881, "learning_rate": 1.6605e-05, "loss": 0.0612, "step": 5538 }, { "epoch": 31.11830985915493, "grad_norm": 0.387766569852829, "learning_rate": 1.6608e-05, "loss": 0.0358, "step": 5539 }, { "epoch": 31.123943661971833, "grad_norm": 0.6235900521278381, "learning_rate": 1.6611e-05, "loss": 0.0343, "step": 5540 }, { "epoch": 31.12957746478873, "grad_norm": 0.7341621518135071, "learning_rate": 1.6614e-05, "loss": 0.0303, "step": 5541 }, { "epoch": 31.135211267605634, "grad_norm": 0.711798906326294, "learning_rate": 1.6617e-05, "loss": 0.0468, "step": 5542 }, { "epoch": 31.140845070422536, "grad_norm": 0.4935031831264496, "learning_rate": 1.6620000000000004e-05, "loss": 0.0245, "step": 5543 }, { "epoch": 31.146478873239438, "grad_norm": 0.46804335713386536, "learning_rate": 1.6623e-05, "loss": 0.021, "step": 5544 }, { "epoch": 31.152112676056337, "grad_norm": 0.9351961612701416, "learning_rate": 1.6626e-05, "loss": 0.0941, "step": 5545 }, { "epoch": 31.15774647887324, "grad_norm": 0.3474634289741516, "learning_rate": 1.6629e-05, "loss": 0.0197, "step": 5546 }, { "epoch": 31.16338028169014, "grad_norm": 0.6768332719802856, "learning_rate": 1.6632e-05, "loss": 0.0503, "step": 5547 }, { "epoch": 31.169014084507044, "grad_norm": 0.48266592621803284, "learning_rate": 1.6635e-05, "loss": 0.0386, "step": 5548 }, { "epoch": 31.174647887323943, "grad_norm": 0.42798465490341187, "learning_rate": 1.6638e-05, "loss": 0.0304, "step": 5549 }, { "epoch": 31.180281690140845, "grad_norm": 0.7782386541366577, "learning_rate": 1.6641e-05, "loss": 0.0559, "step": 5550 }, { "epoch": 31.185915492957747, "grad_norm": 0.4926592707633972, "learning_rate": 1.6644e-05, "loss": 0.0429, "step": 5551 }, { "epoch": 31.19154929577465, "grad_norm": 0.5420024991035461, 
"learning_rate": 1.6647e-05, "loss": 0.0391, "step": 5552 }, { "epoch": 31.197183098591548, "grad_norm": 0.6278807520866394, "learning_rate": 1.665e-05, "loss": 0.0535, "step": 5553 }, { "epoch": 31.20281690140845, "grad_norm": 0.4035853147506714, "learning_rate": 1.6653e-05, "loss": 0.0359, "step": 5554 }, { "epoch": 31.208450704225353, "grad_norm": 0.4597330093383789, "learning_rate": 1.6656e-05, "loss": 0.0134, "step": 5555 }, { "epoch": 31.214084507042255, "grad_norm": 0.33946409821510315, "learning_rate": 1.6659e-05, "loss": 0.0181, "step": 5556 }, { "epoch": 31.219718309859154, "grad_norm": 0.5098097920417786, "learning_rate": 1.6662e-05, "loss": 0.0547, "step": 5557 }, { "epoch": 31.225352112676056, "grad_norm": 0.42728012800216675, "learning_rate": 1.6665e-05, "loss": 0.0398, "step": 5558 }, { "epoch": 31.23098591549296, "grad_norm": 0.4220256209373474, "learning_rate": 1.6668e-05, "loss": 0.0144, "step": 5559 }, { "epoch": 31.23661971830986, "grad_norm": 1.2545212507247925, "learning_rate": 1.6671e-05, "loss": 0.1155, "step": 5560 }, { "epoch": 31.24225352112676, "grad_norm": 0.5287292003631592, "learning_rate": 1.6674e-05, "loss": 0.0376, "step": 5561 }, { "epoch": 31.24788732394366, "grad_norm": 0.5707404017448425, "learning_rate": 1.6677e-05, "loss": 0.0343, "step": 5562 }, { "epoch": 31.253521126760564, "grad_norm": 0.9593307971954346, "learning_rate": 1.6680000000000003e-05, "loss": 0.1699, "step": 5563 }, { "epoch": 31.259154929577466, "grad_norm": 0.6115437746047974, "learning_rate": 1.6683000000000003e-05, "loss": 0.1019, "step": 5564 }, { "epoch": 31.264788732394365, "grad_norm": 0.6940182447433472, "learning_rate": 1.6686000000000003e-05, "loss": 0.1085, "step": 5565 }, { "epoch": 31.270422535211267, "grad_norm": 0.5733916759490967, "learning_rate": 1.6689000000000002e-05, "loss": 0.1232, "step": 5566 }, { "epoch": 31.27605633802817, "grad_norm": 0.6373168230056763, "learning_rate": 1.6692000000000002e-05, "loss": 0.0678, "step": 5567 }, { 
"epoch": 31.281690140845072, "grad_norm": 0.6498945951461792, "learning_rate": 1.6695000000000002e-05, "loss": 0.0863, "step": 5568 }, { "epoch": 31.28732394366197, "grad_norm": 1.2955653667449951, "learning_rate": 1.6698e-05, "loss": 0.1595, "step": 5569 }, { "epoch": 31.292957746478873, "grad_norm": 0.7084673047065735, "learning_rate": 1.6700999999999998e-05, "loss": 0.0977, "step": 5570 }, { "epoch": 31.298591549295775, "grad_norm": 0.6492661833763123, "learning_rate": 1.6703999999999998e-05, "loss": 0.0642, "step": 5571 }, { "epoch": 31.304225352112677, "grad_norm": 0.5929644107818604, "learning_rate": 1.6706999999999998e-05, "loss": 0.0765, "step": 5572 }, { "epoch": 31.309859154929576, "grad_norm": 0.48176151514053345, "learning_rate": 1.671e-05, "loss": 0.0669, "step": 5573 }, { "epoch": 31.31549295774648, "grad_norm": 0.5992491841316223, "learning_rate": 1.6713e-05, "loss": 0.0657, "step": 5574 }, { "epoch": 31.32112676056338, "grad_norm": 0.643916130065918, "learning_rate": 1.6716e-05, "loss": 0.1164, "step": 5575 }, { "epoch": 31.326760563380283, "grad_norm": 0.4492989778518677, "learning_rate": 1.6719e-05, "loss": 0.048, "step": 5576 }, { "epoch": 31.33239436619718, "grad_norm": 0.3729010820388794, "learning_rate": 1.6722e-05, "loss": 0.0323, "step": 5577 }, { "epoch": 31.338028169014084, "grad_norm": 0.4606415331363678, "learning_rate": 1.6725e-05, "loss": 0.0339, "step": 5578 }, { "epoch": 31.343661971830986, "grad_norm": 0.6883161067962646, "learning_rate": 1.6728e-05, "loss": 0.0897, "step": 5579 }, { "epoch": 31.34929577464789, "grad_norm": 0.8938865661621094, "learning_rate": 1.6731e-05, "loss": 0.05, "step": 5580 }, { "epoch": 31.354929577464787, "grad_norm": 1.1559032201766968, "learning_rate": 1.6734e-05, "loss": 0.0381, "step": 5581 }, { "epoch": 31.36056338028169, "grad_norm": 0.5526645183563232, "learning_rate": 1.6737e-05, "loss": 0.0607, "step": 5582 }, { "epoch": 31.366197183098592, "grad_norm": 0.5124894380569458, "learning_rate": 
1.6740000000000002e-05, "loss": 0.0393, "step": 5583 }, { "epoch": 31.371830985915494, "grad_norm": 0.8438053131103516, "learning_rate": 1.6743000000000002e-05, "loss": 0.05, "step": 5584 }, { "epoch": 31.377464788732393, "grad_norm": 1.057585597038269, "learning_rate": 1.6746000000000002e-05, "loss": 0.0768, "step": 5585 }, { "epoch": 31.383098591549295, "grad_norm": 0.7559165954589844, "learning_rate": 1.6749000000000002e-05, "loss": 0.0251, "step": 5586 }, { "epoch": 31.388732394366198, "grad_norm": 0.5341786742210388, "learning_rate": 1.6752e-05, "loss": 0.0567, "step": 5587 }, { "epoch": 31.3943661971831, "grad_norm": 0.5124914050102234, "learning_rate": 1.6755e-05, "loss": 0.0407, "step": 5588 }, { "epoch": 31.4, "grad_norm": 0.5119737982749939, "learning_rate": 1.6758e-05, "loss": 0.036, "step": 5589 }, { "epoch": 31.4056338028169, "grad_norm": 0.5871683955192566, "learning_rate": 1.6761e-05, "loss": 0.0355, "step": 5590 }, { "epoch": 31.411267605633803, "grad_norm": 1.0471843481063843, "learning_rate": 1.6764e-05, "loss": 0.0344, "step": 5591 }, { "epoch": 31.416901408450705, "grad_norm": 0.4662672281265259, "learning_rate": 1.6767e-05, "loss": 0.0522, "step": 5592 }, { "epoch": 31.422535211267604, "grad_norm": 1.1248334646224976, "learning_rate": 1.677e-05, "loss": 0.0555, "step": 5593 }, { "epoch": 31.428169014084506, "grad_norm": 0.9533230066299438, "learning_rate": 1.6773e-05, "loss": 0.0915, "step": 5594 }, { "epoch": 31.43380281690141, "grad_norm": 1.921445369720459, "learning_rate": 1.6776e-05, "loss": 0.1039, "step": 5595 }, { "epoch": 31.43943661971831, "grad_norm": 0.9249076247215271, "learning_rate": 1.6779e-05, "loss": 0.0417, "step": 5596 }, { "epoch": 31.44507042253521, "grad_norm": 0.5511620044708252, "learning_rate": 1.6782e-05, "loss": 0.0507, "step": 5597 }, { "epoch": 31.450704225352112, "grad_norm": 0.47786372900009155, "learning_rate": 1.6785e-05, "loss": 0.0313, "step": 5598 }, { "epoch": 31.456338028169014, "grad_norm": 
0.8232679963111877, "learning_rate": 1.6788e-05, "loss": 0.0863, "step": 5599 }, { "epoch": 31.461971830985917, "grad_norm": 0.44214072823524475, "learning_rate": 1.6791e-05, "loss": 0.0165, "step": 5600 }, { "epoch": 31.467605633802815, "grad_norm": 0.45216819643974304, "learning_rate": 1.6794e-05, "loss": 0.0153, "step": 5601 }, { "epoch": 31.473239436619718, "grad_norm": 0.611535906791687, "learning_rate": 1.6797e-05, "loss": 0.0214, "step": 5602 }, { "epoch": 31.47887323943662, "grad_norm": 0.5128481984138489, "learning_rate": 1.6800000000000002e-05, "loss": 0.0319, "step": 5603 }, { "epoch": 31.484507042253522, "grad_norm": 0.5371065735816956, "learning_rate": 1.6803e-05, "loss": 0.0193, "step": 5604 }, { "epoch": 31.49014084507042, "grad_norm": 0.6659985184669495, "learning_rate": 1.6806e-05, "loss": 0.0397, "step": 5605 }, { "epoch": 31.495774647887323, "grad_norm": 0.5082145929336548, "learning_rate": 1.6809e-05, "loss": 0.0252, "step": 5606 }, { "epoch": 31.501408450704226, "grad_norm": 1.429542064666748, "learning_rate": 1.6812e-05, "loss": 0.1829, "step": 5607 }, { "epoch": 31.507042253521128, "grad_norm": 0.5824711322784424, "learning_rate": 1.6815e-05, "loss": 0.1018, "step": 5608 }, { "epoch": 31.512676056338027, "grad_norm": 0.5768731236457825, "learning_rate": 1.6818e-05, "loss": 0.1213, "step": 5609 }, { "epoch": 31.51830985915493, "grad_norm": 0.6929510235786438, "learning_rate": 1.6821e-05, "loss": 0.0975, "step": 5610 }, { "epoch": 31.52394366197183, "grad_norm": 0.7835416793823242, "learning_rate": 1.6824e-05, "loss": 0.1194, "step": 5611 }, { "epoch": 31.529577464788733, "grad_norm": 0.7012468576431274, "learning_rate": 1.6827e-05, "loss": 0.1241, "step": 5612 }, { "epoch": 31.535211267605632, "grad_norm": 0.6591938734054565, "learning_rate": 1.6830000000000003e-05, "loss": 0.0758, "step": 5613 }, { "epoch": 31.540845070422534, "grad_norm": 0.5701854825019836, "learning_rate": 1.6833000000000003e-05, "loss": 0.1183, "step": 5614 }, { "epoch": 
31.546478873239437, "grad_norm": 1.5087084770202637, "learning_rate": 1.6836000000000003e-05, "loss": 0.0905, "step": 5615 }, { "epoch": 31.55211267605634, "grad_norm": 0.7833205461502075, "learning_rate": 1.6839000000000003e-05, "loss": 0.0677, "step": 5616 }, { "epoch": 31.557746478873238, "grad_norm": 0.8777860999107361, "learning_rate": 1.6842000000000002e-05, "loss": 0.0955, "step": 5617 }, { "epoch": 31.56338028169014, "grad_norm": 0.5863977074623108, "learning_rate": 1.6845e-05, "loss": 0.0641, "step": 5618 }, { "epoch": 31.569014084507042, "grad_norm": 0.6005957722663879, "learning_rate": 1.6848e-05, "loss": 0.0609, "step": 5619 }, { "epoch": 31.574647887323945, "grad_norm": 0.6444336771965027, "learning_rate": 1.6851e-05, "loss": 0.0465, "step": 5620 }, { "epoch": 31.580281690140843, "grad_norm": 0.6685821413993835, "learning_rate": 1.6853999999999998e-05, "loss": 0.0823, "step": 5621 }, { "epoch": 31.585915492957746, "grad_norm": 0.5449960827827454, "learning_rate": 1.6856999999999998e-05, "loss": 0.0458, "step": 5622 }, { "epoch": 31.591549295774648, "grad_norm": 0.6621996164321899, "learning_rate": 1.686e-05, "loss": 0.0367, "step": 5623 }, { "epoch": 31.59718309859155, "grad_norm": 0.5373045802116394, "learning_rate": 1.6863e-05, "loss": 0.046, "step": 5624 }, { "epoch": 31.60281690140845, "grad_norm": 0.45466578006744385, "learning_rate": 1.6866e-05, "loss": 0.0443, "step": 5625 }, { "epoch": 31.60845070422535, "grad_norm": 0.6236240267753601, "learning_rate": 1.6869e-05, "loss": 0.0396, "step": 5626 }, { "epoch": 31.614084507042254, "grad_norm": 0.46689021587371826, "learning_rate": 1.6872e-05, "loss": 0.0656, "step": 5627 }, { "epoch": 31.619718309859156, "grad_norm": 0.5769214034080505, "learning_rate": 1.6875e-05, "loss": 0.0291, "step": 5628 }, { "epoch": 31.625352112676055, "grad_norm": 0.5204440355300903, "learning_rate": 1.6878e-05, "loss": 0.0544, "step": 5629 }, { "epoch": 31.630985915492957, "grad_norm": 0.503851592540741, "learning_rate": 
1.6881e-05, "loss": 0.0345, "step": 5630 }, { "epoch": 31.63661971830986, "grad_norm": 0.5820248126983643, "learning_rate": 1.6884e-05, "loss": 0.0291, "step": 5631 }, { "epoch": 31.64225352112676, "grad_norm": 0.7093707919120789, "learning_rate": 1.6887e-05, "loss": 0.067, "step": 5632 }, { "epoch": 31.647887323943664, "grad_norm": 0.44341233372688293, "learning_rate": 1.689e-05, "loss": 0.034, "step": 5633 }, { "epoch": 31.653521126760563, "grad_norm": 0.5959410071372986, "learning_rate": 1.6893000000000002e-05, "loss": 0.0341, "step": 5634 }, { "epoch": 31.659154929577465, "grad_norm": 0.49910441040992737, "learning_rate": 1.6896000000000002e-05, "loss": 0.0286, "step": 5635 }, { "epoch": 31.664788732394367, "grad_norm": 0.49249735474586487, "learning_rate": 1.6899000000000002e-05, "loss": 0.0273, "step": 5636 }, { "epoch": 31.670422535211266, "grad_norm": 0.44564536213874817, "learning_rate": 1.6902000000000002e-05, "loss": 0.0317, "step": 5637 }, { "epoch": 31.676056338028168, "grad_norm": 0.41193878650665283, "learning_rate": 1.6905e-05, "loss": 0.0201, "step": 5638 }, { "epoch": 31.68169014084507, "grad_norm": 0.3091217577457428, "learning_rate": 1.6908e-05, "loss": 0.0142, "step": 5639 }, { "epoch": 31.687323943661973, "grad_norm": 0.5937895178794861, "learning_rate": 1.6911e-05, "loss": 0.0202, "step": 5640 }, { "epoch": 31.692957746478875, "grad_norm": 0.5054255127906799, "learning_rate": 1.6914e-05, "loss": 0.0188, "step": 5641 }, { "epoch": 31.698591549295774, "grad_norm": 0.5073084235191345, "learning_rate": 1.6916999999999997e-05, "loss": 0.0342, "step": 5642 }, { "epoch": 31.704225352112676, "grad_norm": 0.5873405337333679, "learning_rate": 1.6919999999999997e-05, "loss": 0.0238, "step": 5643 }, { "epoch": 31.70985915492958, "grad_norm": 0.7361029386520386, "learning_rate": 1.6923e-05, "loss": 0.0448, "step": 5644 }, { "epoch": 31.71549295774648, "grad_norm": 0.7830391526222229, "learning_rate": 1.6926e-05, "loss": 0.0529, "step": 5645 }, { "epoch": 
31.72112676056338, "grad_norm": 1.3506526947021484, "learning_rate": 1.6929e-05, "loss": 0.06, "step": 5646 }, { "epoch": 31.72676056338028, "grad_norm": 0.4994133710861206, "learning_rate": 1.6932e-05, "loss": 0.0186, "step": 5647 }, { "epoch": 31.732394366197184, "grad_norm": 0.37078216671943665, "learning_rate": 1.6935e-05, "loss": 0.0161, "step": 5648 }, { "epoch": 31.738028169014086, "grad_norm": 0.5962488055229187, "learning_rate": 1.6938e-05, "loss": 0.0285, "step": 5649 }, { "epoch": 31.743661971830985, "grad_norm": 0.45387330651283264, "learning_rate": 1.6941e-05, "loss": 0.0188, "step": 5650 }, { "epoch": 31.749295774647887, "grad_norm": 0.6945372819900513, "learning_rate": 1.6944e-05, "loss": 0.157, "step": 5651 }, { "epoch": 31.75492957746479, "grad_norm": 0.55483078956604, "learning_rate": 1.6947e-05, "loss": 0.0943, "step": 5652 }, { "epoch": 31.760563380281692, "grad_norm": 0.5834464430809021, "learning_rate": 1.695e-05, "loss": 0.1055, "step": 5653 }, { "epoch": 31.76619718309859, "grad_norm": 0.6512880325317383, "learning_rate": 1.6953000000000002e-05, "loss": 0.0914, "step": 5654 }, { "epoch": 31.771830985915493, "grad_norm": 0.72260981798172, "learning_rate": 1.6956e-05, "loss": 0.1326, "step": 5655 }, { "epoch": 31.777464788732395, "grad_norm": 0.6703841090202332, "learning_rate": 1.6959e-05, "loss": 0.1118, "step": 5656 }, { "epoch": 31.783098591549297, "grad_norm": 0.5987042784690857, "learning_rate": 1.6962e-05, "loss": 0.0743, "step": 5657 }, { "epoch": 31.788732394366196, "grad_norm": 0.8646307587623596, "learning_rate": 1.6965e-05, "loss": 0.1448, "step": 5658 }, { "epoch": 31.7943661971831, "grad_norm": 0.6654988527297974, "learning_rate": 1.6968e-05, "loss": 0.0723, "step": 5659 }, { "epoch": 31.8, "grad_norm": 0.49199071526527405, "learning_rate": 1.6971e-05, "loss": 0.0601, "step": 5660 }, { "epoch": 31.805633802816903, "grad_norm": 0.8740352392196655, "learning_rate": 1.6974e-05, "loss": 0.0987, "step": 5661 }, { "epoch": 
31.8112676056338, "grad_norm": 0.48888522386550903, "learning_rate": 1.6977e-05, "loss": 0.0717, "step": 5662 }, { "epoch": 31.816901408450704, "grad_norm": 0.5013949275016785, "learning_rate": 1.698e-05, "loss": 0.0675, "step": 5663 }, { "epoch": 31.822535211267606, "grad_norm": 0.7136505842208862, "learning_rate": 1.6983000000000003e-05, "loss": 0.1397, "step": 5664 }, { "epoch": 31.82816901408451, "grad_norm": 0.6881527304649353, "learning_rate": 1.6986000000000003e-05, "loss": 0.0758, "step": 5665 }, { "epoch": 31.833802816901407, "grad_norm": 0.9783480763435364, "learning_rate": 1.6989000000000003e-05, "loss": 0.1175, "step": 5666 }, { "epoch": 31.83943661971831, "grad_norm": 0.4388592541217804, "learning_rate": 1.6992e-05, "loss": 0.0733, "step": 5667 }, { "epoch": 31.845070422535212, "grad_norm": 0.4335355758666992, "learning_rate": 1.6995e-05, "loss": 0.0533, "step": 5668 }, { "epoch": 31.850704225352114, "grad_norm": 0.9735512137413025, "learning_rate": 1.6998e-05, "loss": 0.0634, "step": 5669 }, { "epoch": 31.856338028169013, "grad_norm": 0.5245184302330017, "learning_rate": 1.7001e-05, "loss": 0.0476, "step": 5670 }, { "epoch": 31.861971830985915, "grad_norm": 0.5241080522537231, "learning_rate": 1.7004e-05, "loss": 0.0319, "step": 5671 }, { "epoch": 31.867605633802818, "grad_norm": 0.44545772671699524, "learning_rate": 1.7006999999999998e-05, "loss": 0.0435, "step": 5672 }, { "epoch": 31.87323943661972, "grad_norm": 0.7443620562553406, "learning_rate": 1.7009999999999998e-05, "loss": 0.0573, "step": 5673 }, { "epoch": 31.87887323943662, "grad_norm": 0.40703633427619934, "learning_rate": 1.7013e-05, "loss": 0.0546, "step": 5674 }, { "epoch": 31.88450704225352, "grad_norm": 0.5301401019096375, "learning_rate": 1.7016e-05, "loss": 0.0306, "step": 5675 }, { "epoch": 31.890140845070423, "grad_norm": 0.5867331027984619, "learning_rate": 1.7019e-05, "loss": 0.0536, "step": 5676 }, { "epoch": 31.895774647887325, "grad_norm": 0.6047196388244629, "learning_rate": 
1.7022e-05, "loss": 0.0346, "step": 5677 }, { "epoch": 31.901408450704224, "grad_norm": 0.818812370300293, "learning_rate": 1.7025e-05, "loss": 0.028, "step": 5678 }, { "epoch": 31.907042253521126, "grad_norm": 1.0952240228652954, "learning_rate": 1.7028e-05, "loss": 0.0232, "step": 5679 }, { "epoch": 31.91267605633803, "grad_norm": 0.6211145520210266, "learning_rate": 1.7031e-05, "loss": 0.0523, "step": 5680 }, { "epoch": 31.91830985915493, "grad_norm": 0.6239995360374451, "learning_rate": 1.7034e-05, "loss": 0.0475, "step": 5681 }, { "epoch": 31.92394366197183, "grad_norm": 0.8572902083396912, "learning_rate": 1.7037e-05, "loss": 0.0441, "step": 5682 }, { "epoch": 31.929577464788732, "grad_norm": 0.5140964984893799, "learning_rate": 1.704e-05, "loss": 0.0693, "step": 5683 }, { "epoch": 31.935211267605634, "grad_norm": 0.6571468710899353, "learning_rate": 1.7043000000000003e-05, "loss": 0.0771, "step": 5684 }, { "epoch": 31.940845070422537, "grad_norm": 0.43049657344818115, "learning_rate": 1.7046000000000002e-05, "loss": 0.0382, "step": 5685 }, { "epoch": 31.946478873239435, "grad_norm": 0.9170492887496948, "learning_rate": 1.7049000000000002e-05, "loss": 0.0224, "step": 5686 }, { "epoch": 31.952112676056338, "grad_norm": 0.5578094720840454, "learning_rate": 1.7052000000000002e-05, "loss": 0.0412, "step": 5687 }, { "epoch": 31.95774647887324, "grad_norm": 0.7858496308326721, "learning_rate": 1.7055000000000002e-05, "loss": 0.0358, "step": 5688 }, { "epoch": 31.963380281690142, "grad_norm": 0.3557046055793762, "learning_rate": 1.7058e-05, "loss": 0.0178, "step": 5689 }, { "epoch": 31.96901408450704, "grad_norm": 0.5020837783813477, "learning_rate": 1.7061e-05, "loss": 0.0321, "step": 5690 }, { "epoch": 31.974647887323943, "grad_norm": 0.33814722299575806, "learning_rate": 1.7064e-05, "loss": 0.0177, "step": 5691 }, { "epoch": 31.980281690140846, "grad_norm": 0.8654409050941467, "learning_rate": 1.7066999999999998e-05, "loss": 0.0565, "step": 5692 }, { "epoch": 
31.985915492957748, "grad_norm": 0.5497079491615295, "learning_rate": 1.7069999999999998e-05, "loss": 0.0177, "step": 5693 }, { "epoch": 31.991549295774647, "grad_norm": 0.6990222930908203, "learning_rate": 1.7073e-05, "loss": 0.0182, "step": 5694 }, { "epoch": 31.99718309859155, "grad_norm": 0.61307293176651, "learning_rate": 1.7076e-05, "loss": 0.0495, "step": 5695 }, { "epoch": 32.0, "grad_norm": 0.3723541796207428, "learning_rate": 1.7079e-05, "loss": 0.0145, "step": 5696 }, { "epoch": 32.0056338028169, "grad_norm": 0.6744851469993591, "learning_rate": 1.7082e-05, "loss": 0.1738, "step": 5697 }, { "epoch": 32.011267605633805, "grad_norm": 0.5775330066680908, "learning_rate": 1.7085e-05, "loss": 0.1214, "step": 5698 }, { "epoch": 32.01690140845071, "grad_norm": 0.5827884674072266, "learning_rate": 1.7088e-05, "loss": 0.0937, "step": 5699 }, { "epoch": 32.02253521126761, "grad_norm": 0.5132297873497009, "learning_rate": 1.7091e-05, "loss": 0.123, "step": 5700 }, { "epoch": 32.028169014084504, "grad_norm": 0.5369969010353088, "learning_rate": 1.7094e-05, "loss": 0.0916, "step": 5701 }, { "epoch": 32.03380281690141, "grad_norm": 0.6010861992835999, "learning_rate": 1.7097e-05, "loss": 0.1157, "step": 5702 }, { "epoch": 32.03943661971831, "grad_norm": 1.0902788639068604, "learning_rate": 1.71e-05, "loss": 0.1157, "step": 5703 }, { "epoch": 32.04507042253521, "grad_norm": 0.49402740597724915, "learning_rate": 1.7103000000000002e-05, "loss": 0.0483, "step": 5704 }, { "epoch": 32.05070422535211, "grad_norm": 0.626402735710144, "learning_rate": 1.7106000000000002e-05, "loss": 0.1509, "step": 5705 }, { "epoch": 32.056338028169016, "grad_norm": 1.0383741855621338, "learning_rate": 1.7109000000000002e-05, "loss": 0.0735, "step": 5706 }, { "epoch": 32.06197183098592, "grad_norm": 0.9943619966506958, "learning_rate": 1.7112e-05, "loss": 0.1006, "step": 5707 }, { "epoch": 32.06760563380282, "grad_norm": 0.596344530582428, "learning_rate": 1.7115e-05, "loss": 0.0751, "step": 
5708 }, { "epoch": 32.073239436619716, "grad_norm": 0.5486060380935669, "learning_rate": 1.7118e-05, "loss": 0.0523, "step": 5709 }, { "epoch": 32.07887323943662, "grad_norm": 0.5063339471817017, "learning_rate": 1.7121e-05, "loss": 0.0578, "step": 5710 }, { "epoch": 32.08450704225352, "grad_norm": 0.6485280394554138, "learning_rate": 1.7124e-05, "loss": 0.0435, "step": 5711 }, { "epoch": 32.09014084507042, "grad_norm": 0.7295545339584351, "learning_rate": 1.7127e-05, "loss": 0.0964, "step": 5712 }, { "epoch": 32.095774647887325, "grad_norm": 0.44090792536735535, "learning_rate": 1.713e-05, "loss": 0.0513, "step": 5713 }, { "epoch": 32.10140845070423, "grad_norm": 0.5737271904945374, "learning_rate": 1.7133000000000004e-05, "loss": 0.0457, "step": 5714 }, { "epoch": 32.10704225352113, "grad_norm": 0.49457600712776184, "learning_rate": 1.7136000000000003e-05, "loss": 0.0481, "step": 5715 }, { "epoch": 32.11267605633803, "grad_norm": 0.40526658296585083, "learning_rate": 1.7139e-05, "loss": 0.0269, "step": 5716 }, { "epoch": 32.11830985915493, "grad_norm": 0.5731679797172546, "learning_rate": 1.7142e-05, "loss": 0.0494, "step": 5717 }, { "epoch": 32.12394366197183, "grad_norm": 0.5602243542671204, "learning_rate": 1.7145e-05, "loss": 0.0328, "step": 5718 }, { "epoch": 32.12957746478873, "grad_norm": 0.6265594959259033, "learning_rate": 1.7148e-05, "loss": 0.0638, "step": 5719 }, { "epoch": 32.135211267605634, "grad_norm": 0.35774001479148865, "learning_rate": 1.7151e-05, "loss": 0.0413, "step": 5720 }, { "epoch": 32.140845070422536, "grad_norm": 0.4683723449707031, "learning_rate": 1.7154e-05, "loss": 0.0284, "step": 5721 }, { "epoch": 32.14647887323944, "grad_norm": 0.7359350323677063, "learning_rate": 1.7157e-05, "loss": 0.0509, "step": 5722 }, { "epoch": 32.15211267605634, "grad_norm": 0.4053155481815338, "learning_rate": 1.716e-05, "loss": 0.0208, "step": 5723 }, { "epoch": 32.15774647887324, "grad_norm": 1.4342669248580933, "learning_rate": 1.7163e-05, "loss": 
0.0316, "step": 5724 }, { "epoch": 32.16338028169014, "grad_norm": 0.6968528032302856, "learning_rate": 1.7166e-05, "loss": 0.0257, "step": 5725 }, { "epoch": 32.16901408450704, "grad_norm": 0.9682645797729492, "learning_rate": 1.7169e-05, "loss": 0.0979, "step": 5726 }, { "epoch": 32.17464788732394, "grad_norm": 0.5105908513069153, "learning_rate": 1.7172e-05, "loss": 0.0263, "step": 5727 }, { "epoch": 32.180281690140845, "grad_norm": 0.390238881111145, "learning_rate": 1.7175e-05, "loss": 0.0302, "step": 5728 }, { "epoch": 32.18591549295775, "grad_norm": 0.5150417685508728, "learning_rate": 1.7178e-05, "loss": 0.0587, "step": 5729 }, { "epoch": 32.19154929577465, "grad_norm": 0.49772611260414124, "learning_rate": 1.7181e-05, "loss": 0.0546, "step": 5730 }, { "epoch": 32.19718309859155, "grad_norm": 0.5788368582725525, "learning_rate": 1.7184e-05, "loss": 0.0498, "step": 5731 }, { "epoch": 32.202816901408454, "grad_norm": 0.7248036861419678, "learning_rate": 1.7187e-05, "loss": 0.0348, "step": 5732 }, { "epoch": 32.20845070422535, "grad_norm": 0.590313196182251, "learning_rate": 1.719e-05, "loss": 0.0599, "step": 5733 }, { "epoch": 32.21408450704225, "grad_norm": 0.5463699698448181, "learning_rate": 1.7193000000000003e-05, "loss": 0.0287, "step": 5734 }, { "epoch": 32.219718309859154, "grad_norm": 0.7281765937805176, "learning_rate": 1.7196000000000003e-05, "loss": 0.0241, "step": 5735 }, { "epoch": 32.225352112676056, "grad_norm": 0.7981665134429932, "learning_rate": 1.7199000000000003e-05, "loss": 0.0234, "step": 5736 }, { "epoch": 32.23098591549296, "grad_norm": 0.43422624468803406, "learning_rate": 1.7202000000000002e-05, "loss": 0.0072, "step": 5737 }, { "epoch": 32.23661971830986, "grad_norm": 0.3304568827152252, "learning_rate": 1.7205000000000002e-05, "loss": 0.0274, "step": 5738 }, { "epoch": 32.24225352112676, "grad_norm": 1.1712589263916016, "learning_rate": 1.7208000000000002e-05, "loss": 0.0372, "step": 5739 }, { "epoch": 32.247887323943665, 
"grad_norm": 0.42175909876823425, "learning_rate": 1.7211000000000002e-05, "loss": 0.0172, "step": 5740 }, { "epoch": 32.25352112676056, "grad_norm": 0.6714392304420471, "learning_rate": 1.7213999999999998e-05, "loss": 0.129, "step": 5741 }, { "epoch": 32.25915492957746, "grad_norm": 0.6354323029518127, "learning_rate": 1.7216999999999998e-05, "loss": 0.0991, "step": 5742 }, { "epoch": 32.264788732394365, "grad_norm": 0.5006952285766602, "learning_rate": 1.7219999999999998e-05, "loss": 0.0814, "step": 5743 }, { "epoch": 32.27042253521127, "grad_norm": 0.7615952491760254, "learning_rate": 1.7223e-05, "loss": 0.1261, "step": 5744 }, { "epoch": 32.27605633802817, "grad_norm": 0.6997138857841492, "learning_rate": 1.7226e-05, "loss": 0.0899, "step": 5745 }, { "epoch": 32.28169014084507, "grad_norm": 0.6830964088439941, "learning_rate": 1.7229e-05, "loss": 0.085, "step": 5746 }, { "epoch": 32.287323943661974, "grad_norm": 0.492421418428421, "learning_rate": 1.7232e-05, "loss": 0.0808, "step": 5747 }, { "epoch": 32.292957746478876, "grad_norm": 0.610549807548523, "learning_rate": 1.7235e-05, "loss": 0.0894, "step": 5748 }, { "epoch": 32.29859154929577, "grad_norm": 0.5796210169792175, "learning_rate": 1.7238e-05, "loss": 0.0529, "step": 5749 }, { "epoch": 32.304225352112674, "grad_norm": 0.6331380605697632, "learning_rate": 1.7241e-05, "loss": 0.0678, "step": 5750 }, { "epoch": 32.309859154929576, "grad_norm": 0.43246427178382874, "learning_rate": 1.7244e-05, "loss": 0.0486, "step": 5751 }, { "epoch": 32.31549295774648, "grad_norm": 0.505200982093811, "learning_rate": 1.7247e-05, "loss": 0.0383, "step": 5752 }, { "epoch": 32.32112676056338, "grad_norm": 0.8252564072608948, "learning_rate": 1.725e-05, "loss": 0.1537, "step": 5753 }, { "epoch": 32.32676056338028, "grad_norm": 0.5698729157447815, "learning_rate": 1.7253e-05, "loss": 0.0679, "step": 5754 }, { "epoch": 32.332394366197185, "grad_norm": 0.7247756123542786, "learning_rate": 1.7256000000000002e-05, "loss": 0.071, 
"step": 5755 }, { "epoch": 32.33802816901409, "grad_norm": 0.4685795307159424, "learning_rate": 1.7259000000000002e-05, "loss": 0.0428, "step": 5756 }, { "epoch": 32.34366197183098, "grad_norm": 0.568182647228241, "learning_rate": 1.7262000000000002e-05, "loss": 0.0445, "step": 5757 }, { "epoch": 32.349295774647885, "grad_norm": 0.5793381929397583, "learning_rate": 1.7265e-05, "loss": 0.0571, "step": 5758 }, { "epoch": 32.35492957746479, "grad_norm": 0.6794309616088867, "learning_rate": 1.7268e-05, "loss": 0.0316, "step": 5759 }, { "epoch": 32.36056338028169, "grad_norm": 0.5766786336898804, "learning_rate": 1.7271e-05, "loss": 0.0354, "step": 5760 }, { "epoch": 32.36619718309859, "grad_norm": 0.4715847074985504, "learning_rate": 1.7274e-05, "loss": 0.0398, "step": 5761 }, { "epoch": 32.371830985915494, "grad_norm": 0.6694579720497131, "learning_rate": 1.7277e-05, "loss": 0.0346, "step": 5762 }, { "epoch": 32.3774647887324, "grad_norm": 0.4335472881793976, "learning_rate": 1.728e-05, "loss": 0.0265, "step": 5763 }, { "epoch": 32.3830985915493, "grad_norm": 0.5435230731964111, "learning_rate": 1.7283e-05, "loss": 0.023, "step": 5764 }, { "epoch": 32.388732394366194, "grad_norm": 0.5636742115020752, "learning_rate": 1.7286e-05, "loss": 0.0229, "step": 5765 }, { "epoch": 32.394366197183096, "grad_norm": 0.5728370547294617, "learning_rate": 1.7289e-05, "loss": 0.0321, "step": 5766 }, { "epoch": 32.4, "grad_norm": 0.6417511105537415, "learning_rate": 1.7292e-05, "loss": 0.0815, "step": 5767 }, { "epoch": 32.4056338028169, "grad_norm": 0.39182695746421814, "learning_rate": 1.7295e-05, "loss": 0.02, "step": 5768 }, { "epoch": 32.4112676056338, "grad_norm": 0.8462179899215698, "learning_rate": 1.7298e-05, "loss": 0.033, "step": 5769 }, { "epoch": 32.416901408450705, "grad_norm": 0.5075592994689941, "learning_rate": 1.7301e-05, "loss": 0.0431, "step": 5770 }, { "epoch": 32.42253521126761, "grad_norm": 0.3313916027545929, "learning_rate": 1.7304e-05, "loss": 0.0141, "step": 
5771 }, { "epoch": 32.42816901408451, "grad_norm": 0.4199099838733673, "learning_rate": 1.7307e-05, "loss": 0.0343, "step": 5772 }, { "epoch": 32.433802816901405, "grad_norm": 0.4421937167644501, "learning_rate": 1.731e-05, "loss": 0.0197, "step": 5773 }, { "epoch": 32.43943661971831, "grad_norm": 1.2966301441192627, "learning_rate": 1.7313e-05, "loss": 0.0854, "step": 5774 }, { "epoch": 32.44507042253521, "grad_norm": 0.6689437627792358, "learning_rate": 1.7316e-05, "loss": 0.0289, "step": 5775 }, { "epoch": 32.45070422535211, "grad_norm": 0.6239901185035706, "learning_rate": 1.7319e-05, "loss": 0.0343, "step": 5776 }, { "epoch": 32.456338028169014, "grad_norm": 0.8956945538520813, "learning_rate": 1.7322e-05, "loss": 0.0595, "step": 5777 }, { "epoch": 32.46197183098592, "grad_norm": 0.9170767068862915, "learning_rate": 1.7325e-05, "loss": 0.0418, "step": 5778 }, { "epoch": 32.46760563380282, "grad_norm": 0.6594442129135132, "learning_rate": 1.7328e-05, "loss": 0.0351, "step": 5779 }, { "epoch": 32.47323943661972, "grad_norm": 0.33455052971839905, "learning_rate": 1.7331e-05, "loss": 0.0117, "step": 5780 }, { "epoch": 32.478873239436616, "grad_norm": 0.5202895402908325, "learning_rate": 1.7334e-05, "loss": 0.0142, "step": 5781 }, { "epoch": 32.48450704225352, "grad_norm": 0.589869499206543, "learning_rate": 1.7337e-05, "loss": 0.0582, "step": 5782 }, { "epoch": 32.49014084507042, "grad_norm": 0.3537789583206177, "learning_rate": 1.734e-05, "loss": 0.0325, "step": 5783 }, { "epoch": 32.49577464788732, "grad_norm": 0.3168315589427948, "learning_rate": 1.7343e-05, "loss": 0.0215, "step": 5784 }, { "epoch": 32.501408450704226, "grad_norm": 0.9230402112007141, "learning_rate": 1.7346000000000003e-05, "loss": 0.1869, "step": 5785 }, { "epoch": 32.50704225352113, "grad_norm": 0.5357785820960999, "learning_rate": 1.7349000000000003e-05, "loss": 0.0807, "step": 5786 }, { "epoch": 32.51267605633803, "grad_norm": 0.5515801310539246, "learning_rate": 1.7352000000000003e-05, 
"loss": 0.0895, "step": 5787 }, { "epoch": 32.51830985915493, "grad_norm": 0.5841496586799622, "learning_rate": 1.7355000000000002e-05, "loss": 0.0758, "step": 5788 }, { "epoch": 32.52394366197183, "grad_norm": 0.6079568862915039, "learning_rate": 1.7358000000000002e-05, "loss": 0.1425, "step": 5789 }, { "epoch": 32.52957746478873, "grad_norm": 0.6329312324523926, "learning_rate": 1.7361e-05, "loss": 0.0973, "step": 5790 }, { "epoch": 32.53521126760563, "grad_norm": 0.57078617811203, "learning_rate": 1.7364e-05, "loss": 0.1011, "step": 5791 }, { "epoch": 32.540845070422534, "grad_norm": 0.4664309024810791, "learning_rate": 1.7366999999999998e-05, "loss": 0.0538, "step": 5792 }, { "epoch": 32.54647887323944, "grad_norm": 0.6201039552688599, "learning_rate": 1.7369999999999998e-05, "loss": 0.0702, "step": 5793 }, { "epoch": 32.55211267605634, "grad_norm": 0.6031767129898071, "learning_rate": 1.7372999999999998e-05, "loss": 0.0545, "step": 5794 }, { "epoch": 32.55774647887324, "grad_norm": 0.47419947385787964, "learning_rate": 1.7376e-05, "loss": 0.0661, "step": 5795 }, { "epoch": 32.563380281690144, "grad_norm": 0.5877168774604797, "learning_rate": 1.7379e-05, "loss": 0.068, "step": 5796 }, { "epoch": 32.56901408450704, "grad_norm": 0.43133413791656494, "learning_rate": 1.7382e-05, "loss": 0.0696, "step": 5797 }, { "epoch": 32.57464788732394, "grad_norm": 0.5321900844573975, "learning_rate": 1.7385e-05, "loss": 0.0413, "step": 5798 }, { "epoch": 32.58028169014084, "grad_norm": 0.4986403286457062, "learning_rate": 1.7388e-05, "loss": 0.0461, "step": 5799 }, { "epoch": 32.585915492957746, "grad_norm": 0.6751270294189453, "learning_rate": 1.7391e-05, "loss": 0.0629, "step": 5800 }, { "epoch": 32.59154929577465, "grad_norm": 0.5142871141433716, "learning_rate": 1.7394e-05, "loss": 0.0605, "step": 5801 }, { "epoch": 32.59718309859155, "grad_norm": 0.42698249220848083, "learning_rate": 1.7397e-05, "loss": 0.0435, "step": 5802 }, { "epoch": 32.60281690140845, "grad_norm": 
0.6031014323234558, "learning_rate": 1.74e-05, "loss": 0.0531, "step": 5803 }, { "epoch": 32.608450704225355, "grad_norm": 0.47830337285995483, "learning_rate": 1.7403e-05, "loss": 0.0332, "step": 5804 }, { "epoch": 32.61408450704225, "grad_norm": 0.8209830522537231, "learning_rate": 1.7406000000000002e-05, "loss": 0.0649, "step": 5805 }, { "epoch": 32.61971830985915, "grad_norm": 0.534038782119751, "learning_rate": 1.7409000000000002e-05, "loss": 0.0589, "step": 5806 }, { "epoch": 32.625352112676055, "grad_norm": 0.6797208189964294, "learning_rate": 1.7412000000000002e-05, "loss": 0.0476, "step": 5807 }, { "epoch": 32.63098591549296, "grad_norm": 1.0539840459823608, "learning_rate": 1.7415000000000002e-05, "loss": 0.0522, "step": 5808 }, { "epoch": 32.63661971830986, "grad_norm": 0.5323993563652039, "learning_rate": 1.7418e-05, "loss": 0.0245, "step": 5809 }, { "epoch": 32.64225352112676, "grad_norm": 0.39119380712509155, "learning_rate": 1.7421e-05, "loss": 0.0454, "step": 5810 }, { "epoch": 32.647887323943664, "grad_norm": 0.45977240800857544, "learning_rate": 1.7424e-05, "loss": 0.031, "step": 5811 }, { "epoch": 32.653521126760566, "grad_norm": 0.648202121257782, "learning_rate": 1.7427e-05, "loss": 0.0303, "step": 5812 }, { "epoch": 32.65915492957747, "grad_norm": 0.34546196460723877, "learning_rate": 1.743e-05, "loss": 0.0208, "step": 5813 }, { "epoch": 32.66478873239436, "grad_norm": 0.3723370134830475, "learning_rate": 1.7432999999999997e-05, "loss": 0.0205, "step": 5814 }, { "epoch": 32.670422535211266, "grad_norm": 0.6415125131607056, "learning_rate": 1.7436e-05, "loss": 0.0248, "step": 5815 }, { "epoch": 32.67605633802817, "grad_norm": 0.7809388637542725, "learning_rate": 1.7439e-05, "loss": 0.0443, "step": 5816 }, { "epoch": 32.68169014084507, "grad_norm": 0.6216216087341309, "learning_rate": 1.7442e-05, "loss": 0.0937, "step": 5817 }, { "epoch": 32.68732394366197, "grad_norm": 0.5598334670066833, "learning_rate": 1.7445e-05, "loss": 0.0415, "step": 
5818 }, { "epoch": 32.692957746478875, "grad_norm": 0.39764827489852905, "learning_rate": 1.7448e-05, "loss": 0.0202, "step": 5819 }, { "epoch": 32.69859154929578, "grad_norm": 1.0863068103790283, "learning_rate": 1.7451e-05, "loss": 0.038, "step": 5820 }, { "epoch": 32.70422535211267, "grad_norm": 0.40128710865974426, "learning_rate": 1.7454e-05, "loss": 0.0132, "step": 5821 }, { "epoch": 32.709859154929575, "grad_norm": 0.5810030698776245, "learning_rate": 1.7457e-05, "loss": 0.0332, "step": 5822 }, { "epoch": 32.71549295774648, "grad_norm": 0.3111312985420227, "learning_rate": 1.746e-05, "loss": 0.0137, "step": 5823 }, { "epoch": 32.72112676056338, "grad_norm": 0.7748981714248657, "learning_rate": 1.7463e-05, "loss": 0.0786, "step": 5824 }, { "epoch": 32.72676056338028, "grad_norm": 0.44803911447525024, "learning_rate": 1.7466000000000002e-05, "loss": 0.0197, "step": 5825 }, { "epoch": 32.732394366197184, "grad_norm": 0.39188578724861145, "learning_rate": 1.7469e-05, "loss": 0.0155, "step": 5826 }, { "epoch": 32.738028169014086, "grad_norm": 0.9083210825920105, "learning_rate": 1.7472e-05, "loss": 0.0407, "step": 5827 }, { "epoch": 32.74366197183099, "grad_norm": 1.0422269105911255, "learning_rate": 1.7475e-05, "loss": 0.0333, "step": 5828 }, { "epoch": 32.74929577464789, "grad_norm": 0.6860128045082092, "learning_rate": 1.7478e-05, "loss": 0.158, "step": 5829 }, { "epoch": 32.754929577464786, "grad_norm": 0.5848652720451355, "learning_rate": 1.7481e-05, "loss": 0.0934, "step": 5830 }, { "epoch": 32.76056338028169, "grad_norm": 0.5980837941169739, "learning_rate": 1.7484e-05, "loss": 0.1014, "step": 5831 }, { "epoch": 32.76619718309859, "grad_norm": 0.6003318428993225, "learning_rate": 1.7487e-05, "loss": 0.1112, "step": 5832 }, { "epoch": 32.77183098591549, "grad_norm": 0.6089819073677063, "learning_rate": 1.749e-05, "loss": 0.0741, "step": 5833 }, { "epoch": 32.777464788732395, "grad_norm": 0.5252190232276917, "learning_rate": 1.7493e-05, "loss": 0.0804, 
"step": 5834 }, { "epoch": 32.7830985915493, "grad_norm": 0.7321880459785461, "learning_rate": 1.7496000000000003e-05, "loss": 0.0916, "step": 5835 }, { "epoch": 32.7887323943662, "grad_norm": 0.5927001237869263, "learning_rate": 1.7499000000000003e-05, "loss": 0.112, "step": 5836 }, { "epoch": 32.7943661971831, "grad_norm": 0.6588932275772095, "learning_rate": 1.7502000000000003e-05, "loss": 0.0658, "step": 5837 }, { "epoch": 32.8, "grad_norm": 0.5907939076423645, "learning_rate": 1.7505000000000003e-05, "loss": 0.0713, "step": 5838 }, { "epoch": 32.8056338028169, "grad_norm": 0.5289585590362549, "learning_rate": 1.7508e-05, "loss": 0.0744, "step": 5839 }, { "epoch": 32.8112676056338, "grad_norm": 0.5498076677322388, "learning_rate": 1.7511e-05, "loss": 0.0547, "step": 5840 }, { "epoch": 32.816901408450704, "grad_norm": 0.6080731153488159, "learning_rate": 1.7514e-05, "loss": 0.0939, "step": 5841 }, { "epoch": 32.822535211267606, "grad_norm": 0.5392447113990784, "learning_rate": 1.7517e-05, "loss": 0.0671, "step": 5842 }, { "epoch": 32.82816901408451, "grad_norm": 0.587604284286499, "learning_rate": 1.7519999999999998e-05, "loss": 0.0778, "step": 5843 }, { "epoch": 32.83380281690141, "grad_norm": 1.0170047283172607, "learning_rate": 1.7522999999999998e-05, "loss": 0.0731, "step": 5844 }, { "epoch": 32.83943661971831, "grad_norm": 0.4489835202693939, "learning_rate": 1.7526e-05, "loss": 0.0403, "step": 5845 }, { "epoch": 32.84507042253521, "grad_norm": 0.5853557586669922, "learning_rate": 1.7529e-05, "loss": 0.0759, "step": 5846 }, { "epoch": 32.85070422535211, "grad_norm": 0.4257259666919708, "learning_rate": 1.7532e-05, "loss": 0.0278, "step": 5847 }, { "epoch": 32.85633802816901, "grad_norm": 0.874121904373169, "learning_rate": 1.7535e-05, "loss": 0.046, "step": 5848 }, { "epoch": 32.861971830985915, "grad_norm": 0.4919375479221344, "learning_rate": 1.7538e-05, "loss": 0.0441, "step": 5849 }, { "epoch": 32.86760563380282, "grad_norm": 1.3954317569732666, 
"learning_rate": 1.7541e-05, "loss": 0.0546, "step": 5850 }, { "epoch": 32.87323943661972, "grad_norm": 0.7131764888763428, "learning_rate": 1.7544e-05, "loss": 0.0572, "step": 5851 }, { "epoch": 32.87887323943662, "grad_norm": 0.4996379613876343, "learning_rate": 1.7547e-05, "loss": 0.0267, "step": 5852 }, { "epoch": 32.884507042253524, "grad_norm": 0.6773791909217834, "learning_rate": 1.755e-05, "loss": 0.0644, "step": 5853 }, { "epoch": 32.89014084507042, "grad_norm": 0.5686938762664795, "learning_rate": 1.7553e-05, "loss": 0.0275, "step": 5854 }, { "epoch": 32.89577464788732, "grad_norm": 0.8687219023704529, "learning_rate": 1.7556000000000003e-05, "loss": 0.0491, "step": 5855 }, { "epoch": 32.901408450704224, "grad_norm": 0.5777212977409363, "learning_rate": 1.7559000000000002e-05, "loss": 0.0292, "step": 5856 }, { "epoch": 32.907042253521126, "grad_norm": 0.9396227598190308, "learning_rate": 1.7562000000000002e-05, "loss": 0.0472, "step": 5857 }, { "epoch": 32.91267605633803, "grad_norm": 0.3443004786968231, "learning_rate": 1.7565000000000002e-05, "loss": 0.017, "step": 5858 }, { "epoch": 32.91830985915493, "grad_norm": 1.2386078834533691, "learning_rate": 1.7568000000000002e-05, "loss": 0.0217, "step": 5859 }, { "epoch": 32.92394366197183, "grad_norm": 0.7961623072624207, "learning_rate": 1.7571e-05, "loss": 0.0311, "step": 5860 }, { "epoch": 32.929577464788736, "grad_norm": 0.5422600507736206, "learning_rate": 1.7574e-05, "loss": 0.0524, "step": 5861 }, { "epoch": 32.93521126760563, "grad_norm": 0.352385014295578, "learning_rate": 1.7577e-05, "loss": 0.0281, "step": 5862 }, { "epoch": 32.94084507042253, "grad_norm": 0.4244561791419983, "learning_rate": 1.758e-05, "loss": 0.0261, "step": 5863 }, { "epoch": 32.946478873239435, "grad_norm": 0.49981990456581116, "learning_rate": 1.7582999999999998e-05, "loss": 0.034, "step": 5864 }, { "epoch": 32.95211267605634, "grad_norm": 0.3571464419364929, "learning_rate": 1.7586e-05, "loss": 0.0335, "step": 5865 }, { 
"epoch": 32.95774647887324, "grad_norm": 0.6573164463043213, "learning_rate": 1.7589e-05, "loss": 0.026, "step": 5866 }, { "epoch": 32.96338028169014, "grad_norm": 0.8562104105949402, "learning_rate": 1.7592e-05, "loss": 0.0342, "step": 5867 }, { "epoch": 32.969014084507045, "grad_norm": 2.5097033977508545, "learning_rate": 1.7595e-05, "loss": 0.081, "step": 5868 }, { "epoch": 32.97464788732395, "grad_norm": 0.7428569197654724, "learning_rate": 1.7598e-05, "loss": 0.0203, "step": 5869 }, { "epoch": 32.98028169014084, "grad_norm": 1.1097179651260376, "learning_rate": 1.7601e-05, "loss": 0.0669, "step": 5870 }, { "epoch": 32.985915492957744, "grad_norm": 0.4538474977016449, "learning_rate": 1.7604e-05, "loss": 0.0212, "step": 5871 }, { "epoch": 32.99154929577465, "grad_norm": 0.5847488045692444, "learning_rate": 1.7607e-05, "loss": 0.0244, "step": 5872 }, { "epoch": 32.99718309859155, "grad_norm": 1.0133192539215088, "learning_rate": 1.761e-05, "loss": 0.0943, "step": 5873 }, { "epoch": 33.0, "grad_norm": 0.289931982755661, "learning_rate": 1.7613e-05, "loss": 0.0054, "step": 5874 }, { "epoch": 33.0056338028169, "grad_norm": 1.026155710220337, "learning_rate": 1.7616000000000002e-05, "loss": 0.2199, "step": 5875 }, { "epoch": 33.011267605633805, "grad_norm": 0.604045569896698, "learning_rate": 1.7619000000000002e-05, "loss": 0.0803, "step": 5876 }, { "epoch": 33.01690140845071, "grad_norm": 0.730092465877533, "learning_rate": 1.7622000000000002e-05, "loss": 0.1126, "step": 5877 }, { "epoch": 33.02253521126761, "grad_norm": 0.6829853057861328, "learning_rate": 1.7625e-05, "loss": 0.1237, "step": 5878 }, { "epoch": 33.028169014084504, "grad_norm": 0.5406138300895691, "learning_rate": 1.7628e-05, "loss": 0.0916, "step": 5879 }, { "epoch": 33.03380281690141, "grad_norm": 0.4970773458480835, "learning_rate": 1.7631e-05, "loss": 0.058, "step": 5880 }, { "epoch": 33.03943661971831, "grad_norm": 0.6569548845291138, "learning_rate": 1.7634e-05, "loss": 0.1402, "step": 5881 }, 
{ "epoch": 33.04507042253521, "grad_norm": 0.6286707520484924, "learning_rate": 1.7637e-05, "loss": 0.0853, "step": 5882 }, { "epoch": 33.05070422535211, "grad_norm": 0.6703985333442688, "learning_rate": 1.764e-05, "loss": 0.0544, "step": 5883 }, { "epoch": 33.056338028169016, "grad_norm": 0.5687394738197327, "learning_rate": 1.7643e-05, "loss": 0.0515, "step": 5884 }, { "epoch": 33.06197183098592, "grad_norm": 0.47109344601631165, "learning_rate": 1.7646e-05, "loss": 0.0474, "step": 5885 }, { "epoch": 33.06760563380282, "grad_norm": 0.6710957884788513, "learning_rate": 1.7649000000000003e-05, "loss": 0.0713, "step": 5886 }, { "epoch": 33.073239436619716, "grad_norm": 0.48007217049598694, "learning_rate": 1.7652000000000003e-05, "loss": 0.0662, "step": 5887 }, { "epoch": 33.07887323943662, "grad_norm": 0.5431410074234009, "learning_rate": 1.7655e-05, "loss": 0.0433, "step": 5888 }, { "epoch": 33.08450704225352, "grad_norm": 0.4844655394554138, "learning_rate": 1.7658e-05, "loss": 0.049, "step": 5889 }, { "epoch": 33.09014084507042, "grad_norm": 0.5238877534866333, "learning_rate": 1.7661e-05, "loss": 0.0617, "step": 5890 }, { "epoch": 33.095774647887325, "grad_norm": 0.5567482113838196, "learning_rate": 1.7664e-05, "loss": 0.0392, "step": 5891 }, { "epoch": 33.10140845070423, "grad_norm": 0.49890193343162537, "learning_rate": 1.7667e-05, "loss": 0.052, "step": 5892 }, { "epoch": 33.10704225352113, "grad_norm": 0.7124143242835999, "learning_rate": 1.767e-05, "loss": 0.0447, "step": 5893 }, { "epoch": 33.11267605633803, "grad_norm": 0.49551934003829956, "learning_rate": 1.7673e-05, "loss": 0.0323, "step": 5894 }, { "epoch": 33.11830985915493, "grad_norm": 0.7350240349769592, "learning_rate": 1.7675999999999998e-05, "loss": 0.0254, "step": 5895 }, { "epoch": 33.12394366197183, "grad_norm": 0.7109642624855042, "learning_rate": 1.7679e-05, "loss": 0.0591, "step": 5896 }, { "epoch": 33.12957746478873, "grad_norm": 0.552890956401825, "learning_rate": 1.7682e-05, "loss": 
0.0654, "step": 5897 }, { "epoch": 33.135211267605634, "grad_norm": 1.005934238433838, "learning_rate": 1.7685e-05, "loss": 0.0334, "step": 5898 }, { "epoch": 33.140845070422536, "grad_norm": 0.5281128287315369, "learning_rate": 1.7688e-05, "loss": 0.0257, "step": 5899 }, { "epoch": 33.14647887323944, "grad_norm": 0.41529062390327454, "learning_rate": 1.7691e-05, "loss": 0.0177, "step": 5900 }, { "epoch": 33.15211267605634, "grad_norm": 0.4439494013786316, "learning_rate": 1.7694e-05, "loss": 0.0707, "step": 5901 }, { "epoch": 33.15774647887324, "grad_norm": 0.7763503193855286, "learning_rate": 1.7697e-05, "loss": 0.0273, "step": 5902 }, { "epoch": 33.16338028169014, "grad_norm": 0.37314170598983765, "learning_rate": 1.77e-05, "loss": 0.0182, "step": 5903 }, { "epoch": 33.16901408450704, "grad_norm": 0.7532930970191956, "learning_rate": 1.7703e-05, "loss": 0.0176, "step": 5904 }, { "epoch": 33.17464788732394, "grad_norm": 0.38727957010269165, "learning_rate": 1.7706e-05, "loss": 0.0225, "step": 5905 }, { "epoch": 33.180281690140845, "grad_norm": 0.660379946231842, "learning_rate": 1.7709000000000003e-05, "loss": 0.0385, "step": 5906 }, { "epoch": 33.18591549295775, "grad_norm": 0.5975499153137207, "learning_rate": 1.7712000000000003e-05, "loss": 0.0585, "step": 5907 }, { "epoch": 33.19154929577465, "grad_norm": 0.7856644988059998, "learning_rate": 1.7715000000000002e-05, "loss": 0.0518, "step": 5908 }, { "epoch": 33.19718309859155, "grad_norm": 0.7501412630081177, "learning_rate": 1.7718000000000002e-05, "loss": 0.0348, "step": 5909 }, { "epoch": 33.202816901408454, "grad_norm": 0.8232806921005249, "learning_rate": 1.7721000000000002e-05, "loss": 0.0231, "step": 5910 }, { "epoch": 33.20845070422535, "grad_norm": 0.5013696551322937, "learning_rate": 1.7724000000000002e-05, "loss": 0.022, "step": 5911 }, { "epoch": 33.21408450704225, "grad_norm": 0.5882750749588013, "learning_rate": 1.7727e-05, "loss": 0.033, "step": 5912 }, { "epoch": 33.219718309859154, 
"grad_norm": 0.7300570011138916, "learning_rate": 1.7729999999999998e-05, "loss": 0.0289, "step": 5913 }, { "epoch": 33.225352112676056, "grad_norm": 1.53815495967865, "learning_rate": 1.7732999999999998e-05, "loss": 0.0422, "step": 5914 }, { "epoch": 33.23098591549296, "grad_norm": 0.40931007266044617, "learning_rate": 1.7735999999999998e-05, "loss": 0.0146, "step": 5915 }, { "epoch": 33.23661971830986, "grad_norm": 0.35870063304901123, "learning_rate": 1.7739e-05, "loss": 0.01, "step": 5916 }, { "epoch": 33.24225352112676, "grad_norm": 0.6509513854980469, "learning_rate": 1.7742e-05, "loss": 0.0645, "step": 5917 }, { "epoch": 33.247887323943665, "grad_norm": 0.48281463980674744, "learning_rate": 1.7745e-05, "loss": 0.0161, "step": 5918 }, { "epoch": 33.25352112676056, "grad_norm": 1.049398422241211, "learning_rate": 1.7748e-05, "loss": 0.1176, "step": 5919 }, { "epoch": 33.25915492957746, "grad_norm": 0.7739635109901428, "learning_rate": 1.7751e-05, "loss": 0.1301, "step": 5920 }, { "epoch": 33.264788732394365, "grad_norm": 0.6575800776481628, "learning_rate": 1.7754e-05, "loss": 0.0957, "step": 5921 }, { "epoch": 33.27042253521127, "grad_norm": 0.6099708080291748, "learning_rate": 1.7757e-05, "loss": 0.113, "step": 5922 }, { "epoch": 33.27605633802817, "grad_norm": 0.5925390124320984, "learning_rate": 1.776e-05, "loss": 0.0756, "step": 5923 }, { "epoch": 33.28169014084507, "grad_norm": 0.650711715221405, "learning_rate": 1.7763e-05, "loss": 0.0737, "step": 5924 }, { "epoch": 33.287323943661974, "grad_norm": 0.4748885929584503, "learning_rate": 1.7766e-05, "loss": 0.0592, "step": 5925 }, { "epoch": 33.292957746478876, "grad_norm": 0.8892757892608643, "learning_rate": 1.7769000000000002e-05, "loss": 0.077, "step": 5926 }, { "epoch": 33.29859154929577, "grad_norm": 0.6461554169654846, "learning_rate": 1.7772000000000002e-05, "loss": 0.0726, "step": 5927 }, { "epoch": 33.304225352112674, "grad_norm": 0.5602725744247437, "learning_rate": 1.7775000000000002e-05, 
"loss": 0.0424, "step": 5928 }, { "epoch": 33.309859154929576, "grad_norm": 1.0738310813903809, "learning_rate": 1.7778e-05, "loss": 0.1219, "step": 5929 }, { "epoch": 33.31549295774648, "grad_norm": 0.49084675312042236, "learning_rate": 1.7781e-05, "loss": 0.0496, "step": 5930 }, { "epoch": 33.32112676056338, "grad_norm": 0.6464419364929199, "learning_rate": 1.7784e-05, "loss": 0.0662, "step": 5931 }, { "epoch": 33.32676056338028, "grad_norm": 0.7269509434700012, "learning_rate": 1.7787e-05, "loss": 0.072, "step": 5932 }, { "epoch": 33.332394366197185, "grad_norm": 0.4730350077152252, "learning_rate": 1.779e-05, "loss": 0.0624, "step": 5933 }, { "epoch": 33.33802816901409, "grad_norm": 0.5698527693748474, "learning_rate": 1.7793e-05, "loss": 0.0417, "step": 5934 }, { "epoch": 33.34366197183098, "grad_norm": 0.5374443531036377, "learning_rate": 1.7796e-05, "loss": 0.0586, "step": 5935 }, { "epoch": 33.349295774647885, "grad_norm": 0.5013724565505981, "learning_rate": 1.7799000000000004e-05, "loss": 0.0379, "step": 5936 }, { "epoch": 33.35492957746479, "grad_norm": 0.35774552822113037, "learning_rate": 1.7802e-05, "loss": 0.0245, "step": 5937 }, { "epoch": 33.36056338028169, "grad_norm": 0.6704871654510498, "learning_rate": 1.7805e-05, "loss": 0.0442, "step": 5938 }, { "epoch": 33.36619718309859, "grad_norm": 0.5915015339851379, "learning_rate": 1.7808e-05, "loss": 0.0599, "step": 5939 }, { "epoch": 33.371830985915494, "grad_norm": 0.3818640112876892, "learning_rate": 1.7811e-05, "loss": 0.0266, "step": 5940 }, { "epoch": 33.3774647887324, "grad_norm": 0.4919954836368561, "learning_rate": 1.7814e-05, "loss": 0.0326, "step": 5941 }, { "epoch": 33.3830985915493, "grad_norm": 0.42707616090774536, "learning_rate": 1.7817e-05, "loss": 0.0315, "step": 5942 }, { "epoch": 33.388732394366194, "grad_norm": 0.5632707476615906, "learning_rate": 1.782e-05, "loss": 0.0349, "step": 5943 }, { "epoch": 33.394366197183096, "grad_norm": 0.4194190204143524, "learning_rate": 1.7823e-05, 
"loss": 0.0484, "step": 5944 }, { "epoch": 33.4, "grad_norm": 0.5260012745857239, "learning_rate": 1.7826e-05, "loss": 0.0426, "step": 5945 }, { "epoch": 33.4056338028169, "grad_norm": 0.5825883150100708, "learning_rate": 1.7829e-05, "loss": 0.0233, "step": 5946 }, { "epoch": 33.4112676056338, "grad_norm": 0.411047101020813, "learning_rate": 1.7832e-05, "loss": 0.0167, "step": 5947 }, { "epoch": 33.416901408450705, "grad_norm": 0.8185343742370605, "learning_rate": 1.7835e-05, "loss": 0.0463, "step": 5948 }, { "epoch": 33.42253521126761, "grad_norm": 0.5584174990653992, "learning_rate": 1.7838e-05, "loss": 0.0567, "step": 5949 }, { "epoch": 33.42816901408451, "grad_norm": 0.5381035804748535, "learning_rate": 1.7841e-05, "loss": 0.0622, "step": 5950 }, { "epoch": 33.433802816901405, "grad_norm": 0.3509693145751953, "learning_rate": 1.7844e-05, "loss": 0.031, "step": 5951 }, { "epoch": 33.43943661971831, "grad_norm": 0.518997311592102, "learning_rate": 1.7847e-05, "loss": 0.0758, "step": 5952 }, { "epoch": 33.44507042253521, "grad_norm": 0.4926474392414093, "learning_rate": 1.785e-05, "loss": 0.0629, "step": 5953 }, { "epoch": 33.45070422535211, "grad_norm": 0.6225311756134033, "learning_rate": 1.7853e-05, "loss": 0.0204, "step": 5954 }, { "epoch": 33.456338028169014, "grad_norm": 0.48841506242752075, "learning_rate": 1.7856e-05, "loss": 0.0176, "step": 5955 }, { "epoch": 33.46197183098592, "grad_norm": 0.5085184574127197, "learning_rate": 1.7859000000000003e-05, "loss": 0.0556, "step": 5956 }, { "epoch": 33.46760563380282, "grad_norm": 0.7255944013595581, "learning_rate": 1.7862000000000003e-05, "loss": 0.0271, "step": 5957 }, { "epoch": 33.47323943661972, "grad_norm": 0.2985785901546478, "learning_rate": 1.7865000000000003e-05, "loss": 0.0387, "step": 5958 }, { "epoch": 33.478873239436616, "grad_norm": 0.3104129135608673, "learning_rate": 1.7868000000000002e-05, "loss": 0.0121, "step": 5959 }, { "epoch": 33.48450704225352, "grad_norm": 0.5367434620857239, 
"learning_rate": 1.7871000000000002e-05, "loss": 0.0245, "step": 5960 }, { "epoch": 33.49014084507042, "grad_norm": 0.47925323247909546, "learning_rate": 1.7874000000000002e-05, "loss": 0.0173, "step": 5961 }, { "epoch": 33.49577464788732, "grad_norm": 0.2987542748451233, "learning_rate": 1.7877e-05, "loss": 0.0174, "step": 5962 }, { "epoch": 33.501408450704226, "grad_norm": 3.614475727081299, "learning_rate": 1.7879999999999998e-05, "loss": 0.1928, "step": 5963 }, { "epoch": 33.50704225352113, "grad_norm": 0.6507062911987305, "learning_rate": 1.7882999999999998e-05, "loss": 0.1168, "step": 5964 }, { "epoch": 33.51267605633803, "grad_norm": 0.5890589952468872, "learning_rate": 1.7885999999999998e-05, "loss": 0.0833, "step": 5965 }, { "epoch": 33.51830985915493, "grad_norm": 0.4482320249080658, "learning_rate": 1.7889e-05, "loss": 0.0634, "step": 5966 }, { "epoch": 33.52394366197183, "grad_norm": 0.570649266242981, "learning_rate": 1.7892e-05, "loss": 0.098, "step": 5967 }, { "epoch": 33.52957746478873, "grad_norm": 0.6110566854476929, "learning_rate": 1.7895e-05, "loss": 0.0895, "step": 5968 }, { "epoch": 33.53521126760563, "grad_norm": 0.6461649537086487, "learning_rate": 1.7898e-05, "loss": 0.0758, "step": 5969 }, { "epoch": 33.540845070422534, "grad_norm": 0.7147820591926575, "learning_rate": 1.7901e-05, "loss": 0.1021, "step": 5970 }, { "epoch": 33.54647887323944, "grad_norm": 1.7325581312179565, "learning_rate": 1.7904e-05, "loss": 0.113, "step": 5971 }, { "epoch": 33.55211267605634, "grad_norm": 0.5493667721748352, "learning_rate": 1.7907e-05, "loss": 0.0559, "step": 5972 }, { "epoch": 33.55774647887324, "grad_norm": 0.6816404461860657, "learning_rate": 1.791e-05, "loss": 0.0422, "step": 5973 }, { "epoch": 33.563380281690144, "grad_norm": 0.3846578299999237, "learning_rate": 1.7913e-05, "loss": 0.0336, "step": 5974 }, { "epoch": 33.56901408450704, "grad_norm": 0.5620394349098206, "learning_rate": 1.7916e-05, "loss": 0.1025, "step": 5975 }, { "epoch": 
33.57464788732394, "grad_norm": 0.6103384494781494, "learning_rate": 1.7919000000000002e-05, "loss": 0.0772, "step": 5976 }, { "epoch": 33.58028169014084, "grad_norm": 0.49865084886550903, "learning_rate": 1.7922000000000002e-05, "loss": 0.052, "step": 5977 }, { "epoch": 33.585915492957746, "grad_norm": 0.5083425641059875, "learning_rate": 1.7925000000000002e-05, "loss": 0.0722, "step": 5978 }, { "epoch": 33.59154929577465, "grad_norm": 0.40726104378700256, "learning_rate": 1.7928000000000002e-05, "loss": 0.0269, "step": 5979 }, { "epoch": 33.59718309859155, "grad_norm": 0.4311872720718384, "learning_rate": 1.7931e-05, "loss": 0.0674, "step": 5980 }, { "epoch": 33.60281690140845, "grad_norm": 1.0237548351287842, "learning_rate": 1.7934e-05, "loss": 0.0468, "step": 5981 }, { "epoch": 33.608450704225355, "grad_norm": 0.6325929760932922, "learning_rate": 1.7937e-05, "loss": 0.0341, "step": 5982 }, { "epoch": 33.61408450704225, "grad_norm": 0.41311439871788025, "learning_rate": 1.794e-05, "loss": 0.031, "step": 5983 }, { "epoch": 33.61971830985915, "grad_norm": 0.7665412425994873, "learning_rate": 1.7943e-05, "loss": 0.0489, "step": 5984 }, { "epoch": 33.625352112676055, "grad_norm": 0.8757607340812683, "learning_rate": 1.7946e-05, "loss": 0.0772, "step": 5985 }, { "epoch": 33.63098591549296, "grad_norm": 0.5199142098426819, "learning_rate": 1.7949e-05, "loss": 0.0187, "step": 5986 }, { "epoch": 33.63661971830986, "grad_norm": 0.4677933156490326, "learning_rate": 1.7952e-05, "loss": 0.0409, "step": 5987 }, { "epoch": 33.64225352112676, "grad_norm": 0.46251195669174194, "learning_rate": 1.7955e-05, "loss": 0.0211, "step": 5988 }, { "epoch": 33.647887323943664, "grad_norm": 0.47680702805519104, "learning_rate": 1.7958e-05, "loss": 0.0354, "step": 5989 }, { "epoch": 33.653521126760566, "grad_norm": 0.5932157635688782, "learning_rate": 1.7961e-05, "loss": 0.024, "step": 5990 }, { "epoch": 33.65915492957747, "grad_norm": 0.2997179329395294, "learning_rate": 1.7964e-05, 
"loss": 0.0147, "step": 5991 }, { "epoch": 33.66478873239436, "grad_norm": 0.48336535692214966, "learning_rate": 1.7967e-05, "loss": 0.0378, "step": 5992 }, { "epoch": 33.670422535211266, "grad_norm": 0.5695717334747314, "learning_rate": 1.797e-05, "loss": 0.0241, "step": 5993 }, { "epoch": 33.67605633802817, "grad_norm": 0.6262441873550415, "learning_rate": 1.7973e-05, "loss": 0.0611, "step": 5994 }, { "epoch": 33.68169014084507, "grad_norm": 0.6076148152351379, "learning_rate": 1.7976e-05, "loss": 0.0243, "step": 5995 }, { "epoch": 33.68732394366197, "grad_norm": 0.4330865442752838, "learning_rate": 1.7979000000000002e-05, "loss": 0.0184, "step": 5996 }, { "epoch": 33.692957746478875, "grad_norm": 0.374049574136734, "learning_rate": 1.7982e-05, "loss": 0.0263, "step": 5997 }, { "epoch": 33.69859154929578, "grad_norm": 0.4243713319301605, "learning_rate": 1.7985e-05, "loss": 0.0196, "step": 5998 }, { "epoch": 33.70422535211267, "grad_norm": 0.35133248567581177, "learning_rate": 1.7988e-05, "loss": 0.0117, "step": 5999 }, { "epoch": 33.709859154929575, "grad_norm": 0.39159947633743286, "learning_rate": 1.7991e-05, "loss": 0.0169, "step": 6000 }, { "epoch": 33.709859154929575, "eval_cer": 0.10856209658604868, "eval_loss": 0.4053402543067932, "eval_runtime": 15.7323, "eval_samples_per_second": 19.323, "eval_steps_per_second": 0.636, "eval_wer": 0.37989255564082886, "step": 6000 }, { "epoch": 33.71549295774648, "grad_norm": 0.531837522983551, "learning_rate": 1.7994e-05, "loss": 0.0266, "step": 6001 }, { "epoch": 33.72112676056338, "grad_norm": 0.9081438183784485, "learning_rate": 1.7997e-05, "loss": 0.0401, "step": 6002 }, { "epoch": 33.72676056338028, "grad_norm": 1.2740873098373413, "learning_rate": 1.8e-05, "loss": 0.0226, "step": 6003 }, { "epoch": 33.732394366197184, "grad_norm": 0.5943267345428467, "learning_rate": 1.8003e-05, "loss": 0.0485, "step": 6004 }, { "epoch": 33.738028169014086, "grad_norm": 0.8610036373138428, "learning_rate": 1.8006e-05, "loss": 
0.0674, "step": 6005 }, { "epoch": 33.74366197183099, "grad_norm": 0.8637344241142273, "learning_rate": 1.8009e-05, "loss": 0.0331, "step": 6006 }, { "epoch": 33.74929577464789, "grad_norm": 1.1160444021224976, "learning_rate": 1.8012000000000003e-05, "loss": 0.1125, "step": 6007 }, { "epoch": 33.754929577464786, "grad_norm": 0.46696341037750244, "learning_rate": 1.8015000000000003e-05, "loss": 0.0783, "step": 6008 }, { "epoch": 33.76056338028169, "grad_norm": 0.7257390022277832, "learning_rate": 1.8018000000000003e-05, "loss": 0.1258, "step": 6009 }, { "epoch": 33.76619718309859, "grad_norm": 0.5489681363105774, "learning_rate": 1.8021000000000002e-05, "loss": 0.0766, "step": 6010 }, { "epoch": 33.77183098591549, "grad_norm": 0.5820057392120361, "learning_rate": 1.8024e-05, "loss": 0.1302, "step": 6011 }, { "epoch": 33.777464788732395, "grad_norm": 0.6289743781089783, "learning_rate": 1.8027e-05, "loss": 0.1141, "step": 6012 }, { "epoch": 33.7830985915493, "grad_norm": 0.5949844121932983, "learning_rate": 1.803e-05, "loss": 0.08, "step": 6013 }, { "epoch": 33.7887323943662, "grad_norm": 0.5025343298912048, "learning_rate": 1.8032999999999998e-05, "loss": 0.0758, "step": 6014 }, { "epoch": 33.7943661971831, "grad_norm": 0.5828680992126465, "learning_rate": 1.8035999999999998e-05, "loss": 0.097, "step": 6015 }, { "epoch": 33.8, "grad_norm": 0.468081533908844, "learning_rate": 1.8038999999999998e-05, "loss": 0.0714, "step": 6016 }, { "epoch": 33.8056338028169, "grad_norm": 0.621192216873169, "learning_rate": 1.8042e-05, "loss": 0.1113, "step": 6017 }, { "epoch": 33.8112676056338, "grad_norm": 0.5311150550842285, "learning_rate": 1.8045e-05, "loss": 0.0492, "step": 6018 }, { "epoch": 33.816901408450704, "grad_norm": 0.9941940903663635, "learning_rate": 1.8048e-05, "loss": 0.0613, "step": 6019 }, { "epoch": 33.822535211267606, "grad_norm": 0.8756390810012817, "learning_rate": 1.8051e-05, "loss": 0.0776, "step": 6020 }, { "epoch": 33.82816901408451, "grad_norm": 
0.6364657878875732, "learning_rate": 1.8054e-05, "loss": 0.0633, "step": 6021 }, { "epoch": 33.83380281690141, "grad_norm": 0.7371833324432373, "learning_rate": 1.8057e-05, "loss": 0.0691, "step": 6022 }, { "epoch": 33.83943661971831, "grad_norm": 0.5794140696525574, "learning_rate": 1.806e-05, "loss": 0.0793, "step": 6023 }, { "epoch": 33.84507042253521, "grad_norm": 0.712140679359436, "learning_rate": 1.8063e-05, "loss": 0.0387, "step": 6024 }, { "epoch": 33.85070422535211, "grad_norm": 0.7415447235107422, "learning_rate": 1.8066e-05, "loss": 0.0517, "step": 6025 }, { "epoch": 33.85633802816901, "grad_norm": 0.38708269596099854, "learning_rate": 1.8069e-05, "loss": 0.0215, "step": 6026 }, { "epoch": 33.861971830985915, "grad_norm": 0.4010728895664215, "learning_rate": 1.8072000000000002e-05, "loss": 0.0256, "step": 6027 }, { "epoch": 33.86760563380282, "grad_norm": 0.6019390821456909, "learning_rate": 1.8075000000000002e-05, "loss": 0.0576, "step": 6028 }, { "epoch": 33.87323943661972, "grad_norm": 0.4041324853897095, "learning_rate": 1.8078000000000002e-05, "loss": 0.0244, "step": 6029 }, { "epoch": 33.87887323943662, "grad_norm": 1.099611759185791, "learning_rate": 1.8081000000000002e-05, "loss": 0.028, "step": 6030 }, { "epoch": 33.884507042253524, "grad_norm": 0.8681243658065796, "learning_rate": 1.8084e-05, "loss": 0.0539, "step": 6031 }, { "epoch": 33.89014084507042, "grad_norm": 0.5321189165115356, "learning_rate": 1.8087e-05, "loss": 0.0384, "step": 6032 }, { "epoch": 33.89577464788732, "grad_norm": 0.5626916289329529, "learning_rate": 1.809e-05, "loss": 0.0367, "step": 6033 }, { "epoch": 33.901408450704224, "grad_norm": 0.3164883553981781, "learning_rate": 1.8093e-05, "loss": 0.0163, "step": 6034 }, { "epoch": 33.907042253521126, "grad_norm": 0.6202046275138855, "learning_rate": 1.8096e-05, "loss": 0.0257, "step": 6035 }, { "epoch": 33.91267605633803, "grad_norm": 0.7213475704193115, "learning_rate": 1.8098999999999997e-05, "loss": 0.0924, "step": 6036 
}, { "epoch": 33.91830985915493, "grad_norm": 0.4257470667362213, "learning_rate": 1.8102e-05, "loss": 0.0173, "step": 6037 }, { "epoch": 33.92394366197183, "grad_norm": 0.37758931517601013, "learning_rate": 1.8105e-05, "loss": 0.0191, "step": 6038 }, { "epoch": 33.929577464788736, "grad_norm": 0.3855280876159668, "learning_rate": 1.8108e-05, "loss": 0.0522, "step": 6039 }, { "epoch": 33.93521126760563, "grad_norm": 0.3692483603954315, "learning_rate": 1.8111e-05, "loss": 0.0168, "step": 6040 }, { "epoch": 33.94084507042253, "grad_norm": 0.44706714153289795, "learning_rate": 1.8114e-05, "loss": 0.0176, "step": 6041 }, { "epoch": 33.946478873239435, "grad_norm": 0.48091545701026917, "learning_rate": 1.8117e-05, "loss": 0.0249, "step": 6042 }, { "epoch": 33.95211267605634, "grad_norm": 1.809732437133789, "learning_rate": 1.812e-05, "loss": 0.1111, "step": 6043 }, { "epoch": 33.95774647887324, "grad_norm": 0.6725586652755737, "learning_rate": 1.8123e-05, "loss": 0.0122, "step": 6044 }, { "epoch": 33.96338028169014, "grad_norm": 0.39014101028442383, "learning_rate": 1.8126e-05, "loss": 0.0268, "step": 6045 }, { "epoch": 33.969014084507045, "grad_norm": 0.4074663519859314, "learning_rate": 1.8129e-05, "loss": 0.0493, "step": 6046 }, { "epoch": 33.97464788732395, "grad_norm": 0.746088981628418, "learning_rate": 1.8132000000000002e-05, "loss": 0.0192, "step": 6047 }, { "epoch": 33.98028169014084, "grad_norm": 0.6625855565071106, "learning_rate": 1.8135000000000002e-05, "loss": 0.0463, "step": 6048 }, { "epoch": 33.985915492957744, "grad_norm": 0.33565446734428406, "learning_rate": 1.8138e-05, "loss": 0.0092, "step": 6049 }, { "epoch": 33.99154929577465, "grad_norm": 0.46764931082725525, "learning_rate": 1.8141e-05, "loss": 0.0114, "step": 6050 }, { "epoch": 33.99718309859155, "grad_norm": 0.4951019585132599, "learning_rate": 1.8144e-05, "loss": 0.0531, "step": 6051 }, { "epoch": 34.0, "grad_norm": 0.2843382954597473, "learning_rate": 1.8147e-05, "loss": 0.0065, "step": 
6052 }, { "epoch": 34.0056338028169, "grad_norm": 0.9155585169792175, "learning_rate": 1.815e-05, "loss": 0.1068, "step": 6053 }, { "epoch": 34.011267605633805, "grad_norm": 0.585608959197998, "learning_rate": 1.8153e-05, "loss": 0.1337, "step": 6054 }, { "epoch": 34.01690140845071, "grad_norm": 0.7345219850540161, "learning_rate": 1.8156e-05, "loss": 0.0931, "step": 6055 }, { "epoch": 34.02253521126761, "grad_norm": 0.5581880211830139, "learning_rate": 1.8159e-05, "loss": 0.119, "step": 6056 }, { "epoch": 34.028169014084504, "grad_norm": 0.4639724791049957, "learning_rate": 1.8162000000000003e-05, "loss": 0.0688, "step": 6057 }, { "epoch": 34.03380281690141, "grad_norm": 0.5017245411872864, "learning_rate": 1.8165000000000003e-05, "loss": 0.0548, "step": 6058 }, { "epoch": 34.03943661971831, "grad_norm": 0.469848096370697, "learning_rate": 1.8168000000000003e-05, "loss": 0.0486, "step": 6059 }, { "epoch": 34.04507042253521, "grad_norm": 0.6235021948814392, "learning_rate": 1.8171e-05, "loss": 0.1117, "step": 6060 }, { "epoch": 34.05070422535211, "grad_norm": 0.6541318893432617, "learning_rate": 1.8174e-05, "loss": 0.0654, "step": 6061 }, { "epoch": 34.056338028169016, "grad_norm": 0.4679739475250244, "learning_rate": 1.8177e-05, "loss": 0.0433, "step": 6062 }, { "epoch": 34.06197183098592, "grad_norm": 0.5916927456855774, "learning_rate": 1.818e-05, "loss": 0.0897, "step": 6063 }, { "epoch": 34.06760563380282, "grad_norm": 0.39478370547294617, "learning_rate": 1.8183e-05, "loss": 0.0316, "step": 6064 }, { "epoch": 34.073239436619716, "grad_norm": 0.5042543411254883, "learning_rate": 1.8186e-05, "loss": 0.1041, "step": 6065 }, { "epoch": 34.07887323943662, "grad_norm": 0.6638868451118469, "learning_rate": 1.8188999999999998e-05, "loss": 0.0346, "step": 6066 }, { "epoch": 34.08450704225352, "grad_norm": 0.701197624206543, "learning_rate": 1.8192e-05, "loss": 0.0641, "step": 6067 }, { "epoch": 34.09014084507042, "grad_norm": 0.4527539014816284, "learning_rate": 
1.8195e-05, "loss": 0.0568, "step": 6068 }, { "epoch": 34.095774647887325, "grad_norm": 0.6241064667701721, "learning_rate": 1.8198e-05, "loss": 0.081, "step": 6069 }, { "epoch": 34.10140845070423, "grad_norm": 0.450066477060318, "learning_rate": 1.8201e-05, "loss": 0.0277, "step": 6070 }, { "epoch": 34.10704225352113, "grad_norm": 0.42942139506340027, "learning_rate": 1.8204e-05, "loss": 0.0295, "step": 6071 }, { "epoch": 34.11267605633803, "grad_norm": 0.40736421942710876, "learning_rate": 1.8207e-05, "loss": 0.0254, "step": 6072 }, { "epoch": 34.11830985915493, "grad_norm": 0.7218704223632812, "learning_rate": 1.821e-05, "loss": 0.0448, "step": 6073 }, { "epoch": 34.12394366197183, "grad_norm": 0.345075786113739, "learning_rate": 1.8213e-05, "loss": 0.0278, "step": 6074 }, { "epoch": 34.12957746478873, "grad_norm": 0.43969792127609253, "learning_rate": 1.8216e-05, "loss": 0.0393, "step": 6075 }, { "epoch": 34.135211267605634, "grad_norm": 0.38107427954673767, "learning_rate": 1.8219e-05, "loss": 0.0352, "step": 6076 }, { "epoch": 34.140845070422536, "grad_norm": 0.5287465453147888, "learning_rate": 1.8222000000000003e-05, "loss": 0.0272, "step": 6077 }, { "epoch": 34.14647887323944, "grad_norm": 0.8034244179725647, "learning_rate": 1.8225000000000003e-05, "loss": 0.0364, "step": 6078 }, { "epoch": 34.15211267605634, "grad_norm": 0.4183410704135895, "learning_rate": 1.8228000000000002e-05, "loss": 0.0622, "step": 6079 }, { "epoch": 34.15774647887324, "grad_norm": 0.41832780838012695, "learning_rate": 1.8231000000000002e-05, "loss": 0.025, "step": 6080 }, { "epoch": 34.16338028169014, "grad_norm": 0.4031595289707184, "learning_rate": 1.8234000000000002e-05, "loss": 0.0186, "step": 6081 }, { "epoch": 34.16901408450704, "grad_norm": 0.516957700252533, "learning_rate": 1.8237000000000002e-05, "loss": 0.0155, "step": 6082 }, { "epoch": 34.17464788732394, "grad_norm": 0.5100588798522949, "learning_rate": 1.824e-05, "loss": 0.0408, "step": 6083 }, { "epoch": 
34.180281690140845, "grad_norm": 0.7516012191772461, "learning_rate": 1.8243e-05, "loss": 0.0271, "step": 6084 }, { "epoch": 34.18591549295775, "grad_norm": 0.4818188548088074, "learning_rate": 1.8245999999999998e-05, "loss": 0.0467, "step": 6085 }, { "epoch": 34.19154929577465, "grad_norm": 0.9540300965309143, "learning_rate": 1.8248999999999998e-05, "loss": 0.0493, "step": 6086 }, { "epoch": 34.19718309859155, "grad_norm": 0.6175799369812012, "learning_rate": 1.8252e-05, "loss": 0.048, "step": 6087 }, { "epoch": 34.202816901408454, "grad_norm": 0.556093156337738, "learning_rate": 1.8255e-05, "loss": 0.0223, "step": 6088 }, { "epoch": 34.20845070422535, "grad_norm": 0.4909488558769226, "learning_rate": 1.8258e-05, "loss": 0.0428, "step": 6089 }, { "epoch": 34.21408450704225, "grad_norm": 0.3737712800502777, "learning_rate": 1.8261e-05, "loss": 0.0107, "step": 6090 }, { "epoch": 34.219718309859154, "grad_norm": 0.6161342859268188, "learning_rate": 1.8264e-05, "loss": 0.0148, "step": 6091 }, { "epoch": 34.225352112676056, "grad_norm": 0.4672778844833374, "learning_rate": 1.8267e-05, "loss": 0.0206, "step": 6092 }, { "epoch": 34.23098591549296, "grad_norm": 0.40168219804763794, "learning_rate": 1.827e-05, "loss": 0.0123, "step": 6093 }, { "epoch": 34.23661971830986, "grad_norm": 0.2980515956878662, "learning_rate": 1.8273e-05, "loss": 0.0081, "step": 6094 }, { "epoch": 34.24225352112676, "grad_norm": 0.5070779919624329, "learning_rate": 1.8276e-05, "loss": 0.0283, "step": 6095 }, { "epoch": 34.247887323943665, "grad_norm": 0.383044958114624, "learning_rate": 1.8279e-05, "loss": 0.0105, "step": 6096 }, { "epoch": 34.25352112676056, "grad_norm": 1.1903542280197144, "learning_rate": 1.8282000000000002e-05, "loss": 0.128, "step": 6097 }, { "epoch": 34.25915492957746, "grad_norm": 0.5143328905105591, "learning_rate": 1.8285000000000002e-05, "loss": 0.0912, "step": 6098 }, { "epoch": 34.264788732394365, "grad_norm": 0.647408127784729, "learning_rate": 
1.8288000000000002e-05, "loss": 0.0793, "step": 6099 }, { "epoch": 34.27042253521127, "grad_norm": 0.6464080214500427, "learning_rate": 1.8291e-05, "loss": 0.0915, "step": 6100 }, { "epoch": 34.27605633802817, "grad_norm": 0.704091489315033, "learning_rate": 1.8294e-05, "loss": 0.1414, "step": 6101 }, { "epoch": 34.28169014084507, "grad_norm": 0.5353231430053711, "learning_rate": 1.8297e-05, "loss": 0.0588, "step": 6102 }, { "epoch": 34.287323943661974, "grad_norm": 0.535412073135376, "learning_rate": 1.83e-05, "loss": 0.073, "step": 6103 }, { "epoch": 34.292957746478876, "grad_norm": 0.6380935311317444, "learning_rate": 1.8303e-05, "loss": 0.0579, "step": 6104 }, { "epoch": 34.29859154929577, "grad_norm": 0.4495507478713989, "learning_rate": 1.8306e-05, "loss": 0.0449, "step": 6105 }, { "epoch": 34.304225352112674, "grad_norm": 0.8480080366134644, "learning_rate": 1.8309e-05, "loss": 0.063, "step": 6106 }, { "epoch": 34.309859154929576, "grad_norm": 0.7384077310562134, "learning_rate": 1.8312000000000004e-05, "loss": 0.0891, "step": 6107 }, { "epoch": 34.31549295774648, "grad_norm": 0.6174659729003906, "learning_rate": 1.8315000000000003e-05, "loss": 0.0629, "step": 6108 }, { "epoch": 34.32112676056338, "grad_norm": 0.8926729559898376, "learning_rate": 1.8318e-05, "loss": 0.0465, "step": 6109 }, { "epoch": 34.32676056338028, "grad_norm": 0.5858306884765625, "learning_rate": 1.8321e-05, "loss": 0.076, "step": 6110 }, { "epoch": 34.332394366197185, "grad_norm": 0.6352058053016663, "learning_rate": 1.8324e-05, "loss": 0.0454, "step": 6111 }, { "epoch": 34.33802816901409, "grad_norm": 0.6318805813789368, "learning_rate": 1.8327e-05, "loss": 0.0527, "step": 6112 }, { "epoch": 34.34366197183098, "grad_norm": 0.41176891326904297, "learning_rate": 1.833e-05, "loss": 0.0276, "step": 6113 }, { "epoch": 34.349295774647885, "grad_norm": 0.3779124319553375, "learning_rate": 1.8333e-05, "loss": 0.0261, "step": 6114 }, { "epoch": 34.35492957746479, "grad_norm": 
0.4922230541706085, "learning_rate": 1.8336e-05, "loss": 0.0484, "step": 6115 }, { "epoch": 34.36056338028169, "grad_norm": 0.48782438039779663, "learning_rate": 1.8339e-05, "loss": 0.0246, "step": 6116 }, { "epoch": 34.36619718309859, "grad_norm": 0.46331873536109924, "learning_rate": 1.8342e-05, "loss": 0.0493, "step": 6117 }, { "epoch": 34.371830985915494, "grad_norm": 0.6501144170761108, "learning_rate": 1.8345e-05, "loss": 0.0708, "step": 6118 }, { "epoch": 34.3774647887324, "grad_norm": 0.8352330327033997, "learning_rate": 1.8348e-05, "loss": 0.0514, "step": 6119 }, { "epoch": 34.3830985915493, "grad_norm": 0.5069112777709961, "learning_rate": 1.8351e-05, "loss": 0.0308, "step": 6120 }, { "epoch": 34.388732394366194, "grad_norm": 0.3861197233200073, "learning_rate": 1.8354e-05, "loss": 0.0169, "step": 6121 }, { "epoch": 34.394366197183096, "grad_norm": 0.4531225264072418, "learning_rate": 1.8357e-05, "loss": 0.0564, "step": 6122 }, { "epoch": 34.4, "grad_norm": 0.7433220744132996, "learning_rate": 1.836e-05, "loss": 0.032, "step": 6123 }, { "epoch": 34.4056338028169, "grad_norm": 0.5588197708129883, "learning_rate": 1.8363e-05, "loss": 0.041, "step": 6124 }, { "epoch": 34.4112676056338, "grad_norm": 0.5573118329048157, "learning_rate": 1.8366e-05, "loss": 0.0227, "step": 6125 }, { "epoch": 34.416901408450705, "grad_norm": 0.7430216073989868, "learning_rate": 1.8369e-05, "loss": 0.0398, "step": 6126 }, { "epoch": 34.42253521126761, "grad_norm": 0.38182947039604187, "learning_rate": 1.8372000000000003e-05, "loss": 0.0254, "step": 6127 }, { "epoch": 34.42816901408451, "grad_norm": 0.45653191208839417, "learning_rate": 1.8375000000000003e-05, "loss": 0.027, "step": 6128 }, { "epoch": 34.433802816901405, "grad_norm": 0.5204445719718933, "learning_rate": 1.8378000000000003e-05, "loss": 0.0357, "step": 6129 }, { "epoch": 34.43943661971831, "grad_norm": 0.49119898676872253, "learning_rate": 1.8381000000000002e-05, "loss": 0.0754, "step": 6130 }, { "epoch": 
34.44507042253521, "grad_norm": 0.5466720461845398, "learning_rate": 1.8384000000000002e-05, "loss": 0.0127, "step": 6131 }, { "epoch": 34.45070422535211, "grad_norm": 0.9469252824783325, "learning_rate": 1.8387000000000002e-05, "loss": 0.0262, "step": 6132 }, { "epoch": 34.456338028169014, "grad_norm": 0.38915741443634033, "learning_rate": 1.8390000000000002e-05, "loss": 0.0211, "step": 6133 }, { "epoch": 34.46197183098592, "grad_norm": 0.5218410491943359, "learning_rate": 1.8392999999999998e-05, "loss": 0.0436, "step": 6134 }, { "epoch": 34.46760563380282, "grad_norm": 0.8307033181190491, "learning_rate": 1.8395999999999998e-05, "loss": 0.0365, "step": 6135 }, { "epoch": 34.47323943661972, "grad_norm": 0.48245003819465637, "learning_rate": 1.8398999999999998e-05, "loss": 0.0302, "step": 6136 }, { "epoch": 34.478873239436616, "grad_norm": 0.9966209530830383, "learning_rate": 1.8401999999999998e-05, "loss": 0.0163, "step": 6137 }, { "epoch": 34.48450704225352, "grad_norm": 0.4075906574726105, "learning_rate": 1.8405e-05, "loss": 0.0099, "step": 6138 }, { "epoch": 34.49014084507042, "grad_norm": 0.4154053032398224, "learning_rate": 1.8408e-05, "loss": 0.007, "step": 6139 }, { "epoch": 34.49577464788732, "grad_norm": 0.38620081543922424, "learning_rate": 1.8411e-05, "loss": 0.0103, "step": 6140 }, { "epoch": 34.501408450704226, "grad_norm": 0.9501749873161316, "learning_rate": 1.8414e-05, "loss": 0.1716, "step": 6141 }, { "epoch": 34.50704225352113, "grad_norm": 0.6590558886528015, "learning_rate": 1.8417e-05, "loss": 0.109, "step": 6142 }, { "epoch": 34.51267605633803, "grad_norm": 0.5540900230407715, "learning_rate": 1.842e-05, "loss": 0.0646, "step": 6143 }, { "epoch": 34.51830985915493, "grad_norm": 0.5035600066184998, "learning_rate": 1.8423e-05, "loss": 0.0849, "step": 6144 }, { "epoch": 34.52394366197183, "grad_norm": 0.756904125213623, "learning_rate": 1.8426e-05, "loss": 0.0988, "step": 6145 }, { "epoch": 34.52957746478873, "grad_norm": 0.6137244701385498, 
"learning_rate": 1.8429e-05, "loss": 0.0709, "step": 6146 }, { "epoch": 34.53521126760563, "grad_norm": 0.7401852011680603, "learning_rate": 1.8432e-05, "loss": 0.1329, "step": 6147 }, { "epoch": 34.540845070422534, "grad_norm": 0.5206907987594604, "learning_rate": 1.8435000000000002e-05, "loss": 0.0515, "step": 6148 }, { "epoch": 34.54647887323944, "grad_norm": 0.6739487648010254, "learning_rate": 1.8438000000000002e-05, "loss": 0.0664, "step": 6149 }, { "epoch": 34.55211267605634, "grad_norm": 0.4898243248462677, "learning_rate": 1.8441000000000002e-05, "loss": 0.0444, "step": 6150 }, { "epoch": 34.55774647887324, "grad_norm": 0.5940901041030884, "learning_rate": 1.8444e-05, "loss": 0.0566, "step": 6151 }, { "epoch": 34.563380281690144, "grad_norm": 0.5925102829933167, "learning_rate": 1.8447e-05, "loss": 0.0491, "step": 6152 }, { "epoch": 34.56901408450704, "grad_norm": 0.36916160583496094, "learning_rate": 1.845e-05, "loss": 0.0921, "step": 6153 }, { "epoch": 34.57464788732394, "grad_norm": 0.38008928298950195, "learning_rate": 1.8453e-05, "loss": 0.0508, "step": 6154 }, { "epoch": 34.58028169014084, "grad_norm": 0.5727157592773438, "learning_rate": 1.8456e-05, "loss": 0.0678, "step": 6155 }, { "epoch": 34.585915492957746, "grad_norm": 0.4948899447917938, "learning_rate": 1.8459e-05, "loss": 0.0384, "step": 6156 }, { "epoch": 34.59154929577465, "grad_norm": 2.2999255657196045, "learning_rate": 1.8462e-05, "loss": 0.0657, "step": 6157 }, { "epoch": 34.59718309859155, "grad_norm": 0.4459105134010315, "learning_rate": 1.8465e-05, "loss": 0.0406, "step": 6158 }, { "epoch": 34.60281690140845, "grad_norm": 0.4646996259689331, "learning_rate": 1.8468e-05, "loss": 0.0297, "step": 6159 }, { "epoch": 34.608450704225355, "grad_norm": 1.0391113758087158, "learning_rate": 1.8471e-05, "loss": 0.0504, "step": 6160 }, { "epoch": 34.61408450704225, "grad_norm": 0.4552028775215149, "learning_rate": 1.8474e-05, "loss": 0.0267, "step": 6161 }, { "epoch": 34.61971830985915, 
"grad_norm": 0.381195068359375, "learning_rate": 1.8477e-05, "loss": 0.0189, "step": 6162 }, { "epoch": 34.625352112676055, "grad_norm": 0.3524903357028961, "learning_rate": 1.848e-05, "loss": 0.0201, "step": 6163 }, { "epoch": 34.63098591549296, "grad_norm": 0.6546519994735718, "learning_rate": 1.8483e-05, "loss": 0.0285, "step": 6164 }, { "epoch": 34.63661971830986, "grad_norm": 0.3884595036506653, "learning_rate": 1.8486e-05, "loss": 0.0276, "step": 6165 }, { "epoch": 34.64225352112676, "grad_norm": 0.5107254385948181, "learning_rate": 1.8489e-05, "loss": 0.017, "step": 6166 }, { "epoch": 34.647887323943664, "grad_norm": 0.5944862365722656, "learning_rate": 1.8492e-05, "loss": 0.0168, "step": 6167 }, { "epoch": 34.653521126760566, "grad_norm": 0.5938628911972046, "learning_rate": 1.8495e-05, "loss": 0.0267, "step": 6168 }, { "epoch": 34.65915492957747, "grad_norm": 0.3611334562301636, "learning_rate": 1.8498e-05, "loss": 0.0347, "step": 6169 }, { "epoch": 34.66478873239436, "grad_norm": 0.5333617925643921, "learning_rate": 1.8501e-05, "loss": 0.0368, "step": 6170 }, { "epoch": 34.670422535211266, "grad_norm": 0.4879579544067383, "learning_rate": 1.8504e-05, "loss": 0.0161, "step": 6171 }, { "epoch": 34.67605633802817, "grad_norm": 0.4266453683376312, "learning_rate": 1.8507e-05, "loss": 0.067, "step": 6172 }, { "epoch": 34.68169014084507, "grad_norm": 0.5616534948348999, "learning_rate": 1.851e-05, "loss": 0.051, "step": 6173 }, { "epoch": 34.68732394366197, "grad_norm": 0.5370635986328125, "learning_rate": 1.8513e-05, "loss": 0.0277, "step": 6174 }, { "epoch": 34.692957746478875, "grad_norm": 0.3050360381603241, "learning_rate": 1.8516e-05, "loss": 0.0249, "step": 6175 }, { "epoch": 34.69859154929578, "grad_norm": 1.0823404788970947, "learning_rate": 1.8519e-05, "loss": 0.0297, "step": 6176 }, { "epoch": 34.70422535211267, "grad_norm": 0.7327588796615601, "learning_rate": 1.8522e-05, "loss": 0.0199, "step": 6177 }, { "epoch": 34.709859154929575, "grad_norm": 
0.6341654658317566, "learning_rate": 1.8525000000000003e-05, "loss": 0.0285, "step": 6178 }, { "epoch": 34.71549295774648, "grad_norm": 0.632710874080658, "learning_rate": 1.8528000000000003e-05, "loss": 0.0295, "step": 6179 }, { "epoch": 34.72112676056338, "grad_norm": 0.4795566499233246, "learning_rate": 1.8531000000000003e-05, "loss": 0.0441, "step": 6180 }, { "epoch": 34.72676056338028, "grad_norm": 0.34886911511421204, "learning_rate": 1.8534000000000002e-05, "loss": 0.0084, "step": 6181 }, { "epoch": 34.732394366197184, "grad_norm": 0.6181831359863281, "learning_rate": 1.8537000000000002e-05, "loss": 0.0332, "step": 6182 }, { "epoch": 34.738028169014086, "grad_norm": 0.4863385856151581, "learning_rate": 1.854e-05, "loss": 0.0594, "step": 6183 }, { "epoch": 34.74366197183099, "grad_norm": 0.6216267347335815, "learning_rate": 1.8543e-05, "loss": 0.0373, "step": 6184 }, { "epoch": 34.74929577464789, "grad_norm": 0.6906752586364746, "learning_rate": 1.8545999999999998e-05, "loss": 0.1613, "step": 6185 }, { "epoch": 34.754929577464786, "grad_norm": 0.4505577087402344, "learning_rate": 1.8548999999999998e-05, "loss": 0.087, "step": 6186 }, { "epoch": 34.76056338028169, "grad_norm": 0.7403534054756165, "learning_rate": 1.8551999999999998e-05, "loss": 0.1106, "step": 6187 }, { "epoch": 34.76619718309859, "grad_norm": 0.65023273229599, "learning_rate": 1.8555e-05, "loss": 0.1025, "step": 6188 }, { "epoch": 34.77183098591549, "grad_norm": 0.5730586051940918, "learning_rate": 1.8558e-05, "loss": 0.093, "step": 6189 }, { "epoch": 34.777464788732395, "grad_norm": 0.6893714070320129, "learning_rate": 1.8561e-05, "loss": 0.1144, "step": 6190 }, { "epoch": 34.7830985915493, "grad_norm": 0.4758416712284088, "learning_rate": 1.8564e-05, "loss": 0.0745, "step": 6191 }, { "epoch": 34.7887323943662, "grad_norm": 0.512488067150116, "learning_rate": 1.8567e-05, "loss": 0.1203, "step": 6192 }, { "epoch": 34.7943661971831, "grad_norm": 0.8369739651679993, "learning_rate": 1.857e-05, 
"loss": 0.1185, "step": 6193 }, { "epoch": 34.8, "grad_norm": 0.4324810206890106, "learning_rate": 1.8573e-05, "loss": 0.04, "step": 6194 }, { "epoch": 34.8056338028169, "grad_norm": 0.5436192154884338, "learning_rate": 1.8576e-05, "loss": 0.0752, "step": 6195 }, { "epoch": 34.8112676056338, "grad_norm": 0.5381863117218018, "learning_rate": 1.8579e-05, "loss": 0.0603, "step": 6196 }, { "epoch": 34.816901408450704, "grad_norm": 0.6066027879714966, "learning_rate": 1.8582e-05, "loss": 0.0703, "step": 6197 }, { "epoch": 34.822535211267606, "grad_norm": 0.6921457648277283, "learning_rate": 1.8585000000000002e-05, "loss": 0.0617, "step": 6198 }, { "epoch": 34.82816901408451, "grad_norm": 0.6187387704849243, "learning_rate": 1.8588000000000002e-05, "loss": 0.0311, "step": 6199 }, { "epoch": 34.83380281690141, "grad_norm": 0.5564278960227966, "learning_rate": 1.8591000000000002e-05, "loss": 0.08, "step": 6200 }, { "epoch": 34.83943661971831, "grad_norm": 0.4057439863681793, "learning_rate": 1.8594000000000002e-05, "loss": 0.028, "step": 6201 }, { "epoch": 34.84507042253521, "grad_norm": 0.7050615549087524, "learning_rate": 1.8597e-05, "loss": 0.0658, "step": 6202 }, { "epoch": 34.85070422535211, "grad_norm": 0.4562961757183075, "learning_rate": 1.86e-05, "loss": 0.0369, "step": 6203 }, { "epoch": 34.85633802816901, "grad_norm": 0.5805080533027649, "learning_rate": 1.8603e-05, "loss": 0.0258, "step": 6204 }, { "epoch": 34.861971830985915, "grad_norm": 0.4683409035205841, "learning_rate": 1.8606e-05, "loss": 0.0349, "step": 6205 }, { "epoch": 34.86760563380282, "grad_norm": 0.7426484823226929, "learning_rate": 1.8609e-05, "loss": 0.04, "step": 6206 }, { "epoch": 34.87323943661972, "grad_norm": 0.49151870608329773, "learning_rate": 1.8612e-05, "loss": 0.0744, "step": 6207 }, { "epoch": 34.87887323943662, "grad_norm": 0.5015497803688049, "learning_rate": 1.8615e-05, "loss": 0.0226, "step": 6208 }, { "epoch": 34.884507042253524, "grad_norm": 0.4966681897640228, 
"learning_rate": 1.8618e-05, "loss": 0.0442, "step": 6209 }, { "epoch": 34.89014084507042, "grad_norm": 0.4448240101337433, "learning_rate": 1.8621e-05, "loss": 0.0269, "step": 6210 }, { "epoch": 34.89577464788732, "grad_norm": 0.6787509322166443, "learning_rate": 1.8624e-05, "loss": 0.0322, "step": 6211 }, { "epoch": 34.901408450704224, "grad_norm": 0.37652304768562317, "learning_rate": 1.8627e-05, "loss": 0.019, "step": 6212 }, { "epoch": 34.907042253521126, "grad_norm": 0.432309091091156, "learning_rate": 1.863e-05, "loss": 0.0235, "step": 6213 }, { "epoch": 34.91267605633803, "grad_norm": 0.8964616656303406, "learning_rate": 1.8633e-05, "loss": 0.0717, "step": 6214 }, { "epoch": 34.91830985915493, "grad_norm": 0.3474346995353699, "learning_rate": 1.8636e-05, "loss": 0.0145, "step": 6215 }, { "epoch": 34.92394366197183, "grad_norm": 0.5676194429397583, "learning_rate": 1.8639e-05, "loss": 0.0335, "step": 6216 }, { "epoch": 34.929577464788736, "grad_norm": 0.5494109392166138, "learning_rate": 1.8642e-05, "loss": 0.0158, "step": 6217 }, { "epoch": 34.93521126760563, "grad_norm": 0.5922806859016418, "learning_rate": 1.8645000000000002e-05, "loss": 0.0334, "step": 6218 }, { "epoch": 34.94084507042253, "grad_norm": 0.5020678043365479, "learning_rate": 1.8648000000000002e-05, "loss": 0.034, "step": 6219 }, { "epoch": 34.946478873239435, "grad_norm": 0.4484263062477112, "learning_rate": 1.8651e-05, "loss": 0.0123, "step": 6220 }, { "epoch": 34.95211267605634, "grad_norm": 0.46784278750419617, "learning_rate": 1.8654e-05, "loss": 0.0505, "step": 6221 }, { "epoch": 34.95774647887324, "grad_norm": 0.44617125391960144, "learning_rate": 1.8657e-05, "loss": 0.0149, "step": 6222 }, { "epoch": 34.96338028169014, "grad_norm": 0.48329877853393555, "learning_rate": 1.866e-05, "loss": 0.0432, "step": 6223 }, { "epoch": 34.969014084507045, "grad_norm": 0.4872075617313385, "learning_rate": 1.8663e-05, "loss": 0.0422, "step": 6224 }, { "epoch": 34.97464788732395, "grad_norm": 
0.5057048797607422, "learning_rate": 1.8666e-05, "loss": 0.0141, "step": 6225 }, { "epoch": 34.98028169014084, "grad_norm": 0.6181820631027222, "learning_rate": 1.8669e-05, "loss": 0.0976, "step": 6226 }, { "epoch": 34.985915492957744, "grad_norm": 0.5703194737434387, "learning_rate": 1.8672e-05, "loss": 0.0234, "step": 6227 }, { "epoch": 34.99154929577465, "grad_norm": 1.215890645980835, "learning_rate": 1.8675000000000003e-05, "loss": 0.0183, "step": 6228 }, { "epoch": 34.99718309859155, "grad_norm": 0.8591598272323608, "learning_rate": 1.8678000000000003e-05, "loss": 0.0751, "step": 6229 }, { "epoch": 35.0, "grad_norm": 0.7697758674621582, "learning_rate": 1.8681000000000003e-05, "loss": 0.0628, "step": 6230 }, { "epoch": 35.0056338028169, "grad_norm": 0.687294602394104, "learning_rate": 1.8684000000000003e-05, "loss": 0.1362, "step": 6231 }, { "epoch": 35.011267605633805, "grad_norm": 0.7868660092353821, "learning_rate": 1.8687e-05, "loss": 0.1246, "step": 6232 }, { "epoch": 35.01690140845071, "grad_norm": 0.40882423520088196, "learning_rate": 1.869e-05, "loss": 0.0616, "step": 6233 }, { "epoch": 35.02253521126761, "grad_norm": 0.46661829948425293, "learning_rate": 1.8693e-05, "loss": 0.0765, "step": 6234 }, { "epoch": 35.028169014084504, "grad_norm": 0.5265818238258362, "learning_rate": 1.8696e-05, "loss": 0.1226, "step": 6235 }, { "epoch": 35.03380281690141, "grad_norm": 0.5097890496253967, "learning_rate": 1.8699e-05, "loss": 0.097, "step": 6236 }, { "epoch": 35.03943661971831, "grad_norm": 0.38933542370796204, "learning_rate": 1.8701999999999998e-05, "loss": 0.0703, "step": 6237 }, { "epoch": 35.04507042253521, "grad_norm": 0.4803295433521271, "learning_rate": 1.8705e-05, "loss": 0.0547, "step": 6238 }, { "epoch": 35.05070422535211, "grad_norm": 0.740332841873169, "learning_rate": 1.8708e-05, "loss": 0.0531, "step": 6239 }, { "epoch": 35.056338028169016, "grad_norm": 0.4941658675670624, "learning_rate": 1.8711e-05, "loss": 0.0598, "step": 6240 }, { "epoch": 
35.06197183098592, "grad_norm": 0.7904481887817383, "learning_rate": 1.8714e-05, "loss": 0.0577, "step": 6241 }, { "epoch": 35.06760563380282, "grad_norm": 0.48438018560409546, "learning_rate": 1.8717e-05, "loss": 0.045, "step": 6242 }, { "epoch": 35.073239436619716, "grad_norm": 0.4678439497947693, "learning_rate": 1.872e-05, "loss": 0.0513, "step": 6243 }, { "epoch": 35.07887323943662, "grad_norm": 0.46378761529922485, "learning_rate": 1.8723e-05, "loss": 0.0562, "step": 6244 }, { "epoch": 35.08450704225352, "grad_norm": 0.4357222020626068, "learning_rate": 1.8726e-05, "loss": 0.037, "step": 6245 }, { "epoch": 35.09014084507042, "grad_norm": 0.47095200419425964, "learning_rate": 1.8729e-05, "loss": 0.0372, "step": 6246 }, { "epoch": 35.095774647887325, "grad_norm": 0.5457331538200378, "learning_rate": 1.8732e-05, "loss": 0.0241, "step": 6247 }, { "epoch": 35.10140845070423, "grad_norm": 0.6869837641716003, "learning_rate": 1.8735000000000003e-05, "loss": 0.0489, "step": 6248 }, { "epoch": 35.10704225352113, "grad_norm": 0.700344443321228, "learning_rate": 1.8738000000000003e-05, "loss": 0.0566, "step": 6249 }, { "epoch": 35.11267605633803, "grad_norm": 0.5699595212936401, "learning_rate": 1.8741000000000002e-05, "loss": 0.0266, "step": 6250 }, { "epoch": 35.11830985915493, "grad_norm": 0.43176114559173584, "learning_rate": 1.8744000000000002e-05, "loss": 0.024, "step": 6251 }, { "epoch": 35.12394366197183, "grad_norm": 0.3597666323184967, "learning_rate": 1.8747000000000002e-05, "loss": 0.026, "step": 6252 }, { "epoch": 35.12957746478873, "grad_norm": 0.3508414924144745, "learning_rate": 1.8750000000000002e-05, "loss": 0.0311, "step": 6253 }, { "epoch": 35.135211267605634, "grad_norm": 0.6889611482620239, "learning_rate": 1.8753e-05, "loss": 0.038, "step": 6254 }, { "epoch": 35.140845070422536, "grad_norm": 0.3502342998981476, "learning_rate": 1.8756e-05, "loss": 0.0181, "step": 6255 }, { "epoch": 35.14647887323944, "grad_norm": 0.5793746113777161, 
"learning_rate": 1.8759e-05, "loss": 0.0325, "step": 6256 }, { "epoch": 35.15211267605634, "grad_norm": 1.4724016189575195, "learning_rate": 1.8761999999999998e-05, "loss": 0.0203, "step": 6257 }, { "epoch": 35.15774647887324, "grad_norm": 0.48856326937675476, "learning_rate": 1.8764999999999997e-05, "loss": 0.0224, "step": 6258 }, { "epoch": 35.16338028169014, "grad_norm": 0.5912046432495117, "learning_rate": 1.8768e-05, "loss": 0.0348, "step": 6259 }, { "epoch": 35.16901408450704, "grad_norm": 0.5036078691482544, "learning_rate": 1.8771e-05, "loss": 0.0194, "step": 6260 }, { "epoch": 35.17464788732394, "grad_norm": 0.903668999671936, "learning_rate": 1.8774e-05, "loss": 0.0109, "step": 6261 }, { "epoch": 35.180281690140845, "grad_norm": 0.5701941847801208, "learning_rate": 1.8777e-05, "loss": 0.0267, "step": 6262 }, { "epoch": 35.18591549295775, "grad_norm": 0.7686889171600342, "learning_rate": 1.878e-05, "loss": 0.0896, "step": 6263 }, { "epoch": 35.19154929577465, "grad_norm": 1.23325514793396, "learning_rate": 1.8783e-05, "loss": 0.0337, "step": 6264 }, { "epoch": 35.19718309859155, "grad_norm": 1.1461437940597534, "learning_rate": 1.8786e-05, "loss": 0.0394, "step": 6265 }, { "epoch": 35.202816901408454, "grad_norm": 0.48464226722717285, "learning_rate": 1.8789e-05, "loss": 0.0305, "step": 6266 }, { "epoch": 35.20845070422535, "grad_norm": 0.384177029132843, "learning_rate": 1.8792e-05, "loss": 0.0246, "step": 6267 }, { "epoch": 35.21408450704225, "grad_norm": 0.3981240689754486, "learning_rate": 1.8795e-05, "loss": 0.0103, "step": 6268 }, { "epoch": 35.219718309859154, "grad_norm": 0.2803480923175812, "learning_rate": 1.8798000000000002e-05, "loss": 0.0099, "step": 6269 }, { "epoch": 35.225352112676056, "grad_norm": 0.5057356357574463, "learning_rate": 1.8801000000000002e-05, "loss": 0.0441, "step": 6270 }, { "epoch": 35.23098591549296, "grad_norm": 0.6375514268875122, "learning_rate": 1.8804e-05, "loss": 0.0275, "step": 6271 }, { "epoch": 35.23661971830986, 
"grad_norm": 0.5066351890563965, "learning_rate": 1.8807e-05, "loss": 0.0105, "step": 6272 }, { "epoch": 35.24225352112676, "grad_norm": 0.5587806701660156, "learning_rate": 1.881e-05, "loss": 0.0574, "step": 6273 }, { "epoch": 35.247887323943665, "grad_norm": 0.9124591946601868, "learning_rate": 1.8813e-05, "loss": 0.0169, "step": 6274 }, { "epoch": 35.25352112676056, "grad_norm": 0.7930647134780884, "learning_rate": 1.8816e-05, "loss": 0.1906, "step": 6275 }, { "epoch": 35.25915492957746, "grad_norm": 0.604011595249176, "learning_rate": 1.8819e-05, "loss": 0.0779, "step": 6276 }, { "epoch": 35.264788732394365, "grad_norm": 0.6046846508979797, "learning_rate": 1.8822e-05, "loss": 0.1024, "step": 6277 }, { "epoch": 35.27042253521127, "grad_norm": 0.6085085868835449, "learning_rate": 1.8825e-05, "loss": 0.0892, "step": 6278 }, { "epoch": 35.27605633802817, "grad_norm": 0.5956141352653503, "learning_rate": 1.8828000000000003e-05, "loss": 0.1213, "step": 6279 }, { "epoch": 35.28169014084507, "grad_norm": 0.43994706869125366, "learning_rate": 1.8831000000000003e-05, "loss": 0.044, "step": 6280 }, { "epoch": 35.287323943661974, "grad_norm": 0.5393202900886536, "learning_rate": 1.8834e-05, "loss": 0.053, "step": 6281 }, { "epoch": 35.292957746478876, "grad_norm": 0.48033517599105835, "learning_rate": 1.8837e-05, "loss": 0.0695, "step": 6282 }, { "epoch": 35.29859154929577, "grad_norm": 0.5399298667907715, "learning_rate": 1.884e-05, "loss": 0.0567, "step": 6283 }, { "epoch": 35.304225352112674, "grad_norm": 0.4637104272842407, "learning_rate": 1.8843e-05, "loss": 0.0467, "step": 6284 }, { "epoch": 35.309859154929576, "grad_norm": 0.486113041639328, "learning_rate": 1.8846e-05, "loss": 0.0511, "step": 6285 }, { "epoch": 35.31549295774648, "grad_norm": 0.5356094837188721, "learning_rate": 1.8849e-05, "loss": 0.0432, "step": 6286 }, { "epoch": 35.32112676056338, "grad_norm": 0.5509976148605347, "learning_rate": 1.8852e-05, "loss": 0.0341, "step": 6287 }, { "epoch": 
35.32676056338028, "grad_norm": 0.7316088080406189, "learning_rate": 1.8854999999999998e-05, "loss": 0.0824, "step": 6288 }, { "epoch": 35.332394366197185, "grad_norm": 0.5204934477806091, "learning_rate": 1.8858e-05, "loss": 0.0707, "step": 6289 }, { "epoch": 35.33802816901409, "grad_norm": 0.47097331285476685, "learning_rate": 1.8861e-05, "loss": 0.0279, "step": 6290 }, { "epoch": 35.34366197183098, "grad_norm": 0.4125727415084839, "learning_rate": 1.8864e-05, "loss": 0.0531, "step": 6291 }, { "epoch": 35.349295774647885, "grad_norm": 0.502547562122345, "learning_rate": 1.8867e-05, "loss": 0.0338, "step": 6292 }, { "epoch": 35.35492957746479, "grad_norm": 1.1317648887634277, "learning_rate": 1.887e-05, "loss": 0.0304, "step": 6293 }, { "epoch": 35.36056338028169, "grad_norm": 0.4424722194671631, "learning_rate": 1.8873e-05, "loss": 0.0251, "step": 6294 }, { "epoch": 35.36619718309859, "grad_norm": 0.41200390458106995, "learning_rate": 1.8876e-05, "loss": 0.024, "step": 6295 }, { "epoch": 35.371830985915494, "grad_norm": 0.4836016893386841, "learning_rate": 1.8879e-05, "loss": 0.0328, "step": 6296 }, { "epoch": 35.3774647887324, "grad_norm": 0.836081862449646, "learning_rate": 1.8882e-05, "loss": 0.0723, "step": 6297 }, { "epoch": 35.3830985915493, "grad_norm": 0.6800190210342407, "learning_rate": 1.8885e-05, "loss": 0.0285, "step": 6298 }, { "epoch": 35.388732394366194, "grad_norm": 0.6562647223472595, "learning_rate": 1.8888000000000003e-05, "loss": 0.0222, "step": 6299 }, { "epoch": 35.394366197183096, "grad_norm": 0.4599554240703583, "learning_rate": 1.8891000000000003e-05, "loss": 0.0337, "step": 6300 }, { "epoch": 35.4, "grad_norm": 0.481381356716156, "learning_rate": 1.8894000000000002e-05, "loss": 0.0741, "step": 6301 }, { "epoch": 35.4056338028169, "grad_norm": 0.3885584771633148, "learning_rate": 1.8897000000000002e-05, "loss": 0.0208, "step": 6302 }, { "epoch": 35.4112676056338, "grad_norm": 0.33297741413116455, "learning_rate": 1.8900000000000002e-05, 
"loss": 0.0159, "step": 6303 }, { "epoch": 35.416901408450705, "grad_norm": 0.40797358751296997, "learning_rate": 1.8903000000000002e-05, "loss": 0.0295, "step": 6304 }, { "epoch": 35.42253521126761, "grad_norm": 0.3889458179473877, "learning_rate": 1.8906e-05, "loss": 0.0149, "step": 6305 }, { "epoch": 35.42816901408451, "grad_norm": 0.4039365351200104, "learning_rate": 1.8908999999999998e-05, "loss": 0.019, "step": 6306 }, { "epoch": 35.433802816901405, "grad_norm": 0.4015592038631439, "learning_rate": 1.8911999999999998e-05, "loss": 0.0381, "step": 6307 }, { "epoch": 35.43943661971831, "grad_norm": 0.4503590166568756, "learning_rate": 1.8914999999999998e-05, "loss": 0.0451, "step": 6308 }, { "epoch": 35.44507042253521, "grad_norm": 0.3940379321575165, "learning_rate": 1.8918e-05, "loss": 0.0324, "step": 6309 }, { "epoch": 35.45070422535211, "grad_norm": 1.184687852859497, "learning_rate": 1.8921e-05, "loss": 0.0246, "step": 6310 }, { "epoch": 35.456338028169014, "grad_norm": 0.4415408670902252, "learning_rate": 1.8924e-05, "loss": 0.0166, "step": 6311 }, { "epoch": 35.46197183098592, "grad_norm": 0.553721010684967, "learning_rate": 1.8927e-05, "loss": 0.0221, "step": 6312 }, { "epoch": 35.46760563380282, "grad_norm": 0.8270503282546997, "learning_rate": 1.893e-05, "loss": 0.0427, "step": 6313 }, { "epoch": 35.47323943661972, "grad_norm": 0.7246884107589722, "learning_rate": 1.8933e-05, "loss": 0.0618, "step": 6314 }, { "epoch": 35.478873239436616, "grad_norm": 0.4125763773918152, "learning_rate": 1.8936e-05, "loss": 0.0073, "step": 6315 }, { "epoch": 35.48450704225352, "grad_norm": 0.41136372089385986, "learning_rate": 1.8939e-05, "loss": 0.0257, "step": 6316 }, { "epoch": 35.49014084507042, "grad_norm": 0.3352031111717224, "learning_rate": 1.8942e-05, "loss": 0.0296, "step": 6317 }, { "epoch": 35.49577464788732, "grad_norm": 0.7276496887207031, "learning_rate": 1.8945e-05, "loss": 0.0429, "step": 6318 }, { "epoch": 35.501408450704226, "grad_norm": 
0.5894948244094849, "learning_rate": 1.8948000000000002e-05, "loss": 0.1374, "step": 6319 }, { "epoch": 35.50704225352113, "grad_norm": 0.5944612622261047, "learning_rate": 1.8951000000000002e-05, "loss": 0.0857, "step": 6320 }, { "epoch": 35.51267605633803, "grad_norm": 0.6679982542991638, "learning_rate": 1.8954000000000002e-05, "loss": 0.072, "step": 6321 }, { "epoch": 35.51830985915493, "grad_norm": 0.6501718163490295, "learning_rate": 1.8957e-05, "loss": 0.107, "step": 6322 }, { "epoch": 35.52394366197183, "grad_norm": 0.4600994884967804, "learning_rate": 1.896e-05, "loss": 0.0577, "step": 6323 }, { "epoch": 35.52957746478873, "grad_norm": 0.6243997812271118, "learning_rate": 1.8963e-05, "loss": 0.0984, "step": 6324 }, { "epoch": 35.53521126760563, "grad_norm": 0.5668584704399109, "learning_rate": 1.8966e-05, "loss": 0.1532, "step": 6325 }, { "epoch": 35.540845070422534, "grad_norm": 0.5723873972892761, "learning_rate": 1.8969e-05, "loss": 0.1066, "step": 6326 }, { "epoch": 35.54647887323944, "grad_norm": 0.5691709518432617, "learning_rate": 1.8972e-05, "loss": 0.1001, "step": 6327 }, { "epoch": 35.55211267605634, "grad_norm": 0.5294475555419922, "learning_rate": 1.8975e-05, "loss": 0.035, "step": 6328 }, { "epoch": 35.55774647887324, "grad_norm": 0.46460625529289246, "learning_rate": 1.8978000000000004e-05, "loss": 0.0464, "step": 6329 }, { "epoch": 35.563380281690144, "grad_norm": 0.4911517798900604, "learning_rate": 1.8981e-05, "loss": 0.0431, "step": 6330 }, { "epoch": 35.56901408450704, "grad_norm": 0.5646249055862427, "learning_rate": 1.8984e-05, "loss": 0.1087, "step": 6331 }, { "epoch": 35.57464788732394, "grad_norm": 0.5459504127502441, "learning_rate": 1.8987e-05, "loss": 0.0974, "step": 6332 }, { "epoch": 35.58028169014084, "grad_norm": 0.5167734026908875, "learning_rate": 1.899e-05, "loss": 0.0379, "step": 6333 }, { "epoch": 35.585915492957746, "grad_norm": 0.5917697548866272, "learning_rate": 1.8993e-05, "loss": 0.0585, "step": 6334 }, { "epoch": 
35.59154929577465, "grad_norm": 0.5703282952308655, "learning_rate": 1.8996e-05, "loss": 0.0541, "step": 6335 }, { "epoch": 35.59718309859155, "grad_norm": 0.5199267864227295, "learning_rate": 1.8999e-05, "loss": 0.0274, "step": 6336 }, { "epoch": 35.60281690140845, "grad_norm": 0.5479579567909241, "learning_rate": 1.9002e-05, "loss": 0.0392, "step": 6337 }, { "epoch": 35.608450704225355, "grad_norm": 0.6324604153633118, "learning_rate": 1.9005e-05, "loss": 0.0379, "step": 6338 }, { "epoch": 35.61408450704225, "grad_norm": 2.0806379318237305, "learning_rate": 1.9008e-05, "loss": 0.0396, "step": 6339 }, { "epoch": 35.61971830985915, "grad_norm": 0.5432907342910767, "learning_rate": 1.9011e-05, "loss": 0.0277, "step": 6340 }, { "epoch": 35.625352112676055, "grad_norm": 1.3884131908416748, "learning_rate": 1.9014e-05, "loss": 0.0314, "step": 6341 }, { "epoch": 35.63098591549296, "grad_norm": 0.31462958455085754, "learning_rate": 1.9017e-05, "loss": 0.0206, "step": 6342 }, { "epoch": 35.63661971830986, "grad_norm": 0.45122459530830383, "learning_rate": 1.902e-05, "loss": 0.0178, "step": 6343 }, { "epoch": 35.64225352112676, "grad_norm": 0.605609118938446, "learning_rate": 1.9023e-05, "loss": 0.046, "step": 6344 }, { "epoch": 35.647887323943664, "grad_norm": 0.5712015628814697, "learning_rate": 1.9026e-05, "loss": 0.0331, "step": 6345 }, { "epoch": 35.653521126760566, "grad_norm": 0.424485981464386, "learning_rate": 1.9029e-05, "loss": 0.0185, "step": 6346 }, { "epoch": 35.65915492957747, "grad_norm": 0.4567093551158905, "learning_rate": 1.9032e-05, "loss": 0.0168, "step": 6347 }, { "epoch": 35.66478873239436, "grad_norm": 0.6167874336242676, "learning_rate": 1.9035e-05, "loss": 0.047, "step": 6348 }, { "epoch": 35.670422535211266, "grad_norm": 0.46009448170661926, "learning_rate": 1.9038000000000003e-05, "loss": 0.0415, "step": 6349 }, { "epoch": 35.67605633802817, "grad_norm": 0.48912107944488525, "learning_rate": 1.9041000000000003e-05, "loss": 0.0575, "step": 6350 
}, { "epoch": 35.68169014084507, "grad_norm": 1.4385826587677002, "learning_rate": 1.9044000000000003e-05, "loss": 0.0517, "step": 6351 }, { "epoch": 35.68732394366197, "grad_norm": 0.40414538979530334, "learning_rate": 1.9047000000000002e-05, "loss": 0.0588, "step": 6352 }, { "epoch": 35.692957746478875, "grad_norm": 0.5084457397460938, "learning_rate": 1.9050000000000002e-05, "loss": 0.0143, "step": 6353 }, { "epoch": 35.69859154929578, "grad_norm": 0.5356064438819885, "learning_rate": 1.9053000000000002e-05, "loss": 0.0403, "step": 6354 }, { "epoch": 35.70422535211267, "grad_norm": 0.4586878716945648, "learning_rate": 1.9056e-05, "loss": 0.0769, "step": 6355 }, { "epoch": 35.709859154929575, "grad_norm": 0.5808048248291016, "learning_rate": 1.9058999999999998e-05, "loss": 0.01, "step": 6356 }, { "epoch": 35.71549295774648, "grad_norm": 0.3570108115673065, "learning_rate": 1.9061999999999998e-05, "loss": 0.0089, "step": 6357 }, { "epoch": 35.72112676056338, "grad_norm": 0.6617296934127808, "learning_rate": 1.9064999999999998e-05, "loss": 0.0133, "step": 6358 }, { "epoch": 35.72676056338028, "grad_norm": 0.8042421936988831, "learning_rate": 1.9068e-05, "loss": 0.0179, "step": 6359 }, { "epoch": 35.732394366197184, "grad_norm": 0.44284889101982117, "learning_rate": 1.9071e-05, "loss": 0.0446, "step": 6360 }, { "epoch": 35.738028169014086, "grad_norm": 0.4220072627067566, "learning_rate": 1.9074e-05, "loss": 0.0137, "step": 6361 }, { "epoch": 35.74366197183099, "grad_norm": 0.6122591495513916, "learning_rate": 1.9077e-05, "loss": 0.0254, "step": 6362 }, { "epoch": 35.74929577464789, "grad_norm": 0.8536486625671387, "learning_rate": 1.908e-05, "loss": 0.1113, "step": 6363 }, { "epoch": 35.754929577464786, "grad_norm": 0.6631886959075928, "learning_rate": 1.9083e-05, "loss": 0.0765, "step": 6364 }, { "epoch": 35.76056338028169, "grad_norm": 0.5011869072914124, "learning_rate": 1.9086e-05, "loss": 0.0673, "step": 6365 }, { "epoch": 35.76619718309859, "grad_norm": 
0.612023115158081, "learning_rate": 1.9089e-05, "loss": 0.0739, "step": 6366 }, { "epoch": 35.77183098591549, "grad_norm": 0.5455380082130432, "learning_rate": 1.9092e-05, "loss": 0.067, "step": 6367 }, { "epoch": 35.777464788732395, "grad_norm": 0.543450653553009, "learning_rate": 1.9095e-05, "loss": 0.0531, "step": 6368 }, { "epoch": 35.7830985915493, "grad_norm": 0.72675621509552, "learning_rate": 1.9098000000000002e-05, "loss": 0.0555, "step": 6369 }, { "epoch": 35.7887323943662, "grad_norm": 0.618101954460144, "learning_rate": 1.9101000000000002e-05, "loss": 0.1113, "step": 6370 }, { "epoch": 35.7943661971831, "grad_norm": 0.5905442833900452, "learning_rate": 1.9104000000000002e-05, "loss": 0.0569, "step": 6371 }, { "epoch": 35.8, "grad_norm": 0.8940340876579285, "learning_rate": 1.9107000000000002e-05, "loss": 0.1332, "step": 6372 }, { "epoch": 35.8056338028169, "grad_norm": 0.5381177067756653, "learning_rate": 1.911e-05, "loss": 0.0444, "step": 6373 }, { "epoch": 35.8112676056338, "grad_norm": 0.5147808194160461, "learning_rate": 1.9113e-05, "loss": 0.0535, "step": 6374 }, { "epoch": 35.816901408450704, "grad_norm": 0.6261820197105408, "learning_rate": 1.9116e-05, "loss": 0.0492, "step": 6375 }, { "epoch": 35.822535211267606, "grad_norm": 0.5733263492584229, "learning_rate": 1.9119e-05, "loss": 0.0306, "step": 6376 }, { "epoch": 35.82816901408451, "grad_norm": 0.5498674511909485, "learning_rate": 1.9122e-05, "loss": 0.0974, "step": 6377 }, { "epoch": 35.83380281690141, "grad_norm": 0.5186818242073059, "learning_rate": 1.9125e-05, "loss": 0.0332, "step": 6378 }, { "epoch": 35.83943661971831, "grad_norm": 0.43126821517944336, "learning_rate": 1.9128e-05, "loss": 0.0618, "step": 6379 }, { "epoch": 35.84507042253521, "grad_norm": 0.4959389567375183, "learning_rate": 1.9131e-05, "loss": 0.0342, "step": 6380 }, { "epoch": 35.85070422535211, "grad_norm": 0.5262899994850159, "learning_rate": 1.9134e-05, "loss": 0.0173, "step": 6381 }, { "epoch": 35.85633802816901, 
"grad_norm": 0.39847442507743835, "learning_rate": 1.9137e-05, "loss": 0.0368, "step": 6382 }, { "epoch": 35.861971830985915, "grad_norm": 0.6212478280067444, "learning_rate": 1.914e-05, "loss": 0.0735, "step": 6383 }, { "epoch": 35.86760563380282, "grad_norm": 0.6689456105232239, "learning_rate": 1.9143e-05, "loss": 0.0344, "step": 6384 }, { "epoch": 35.87323943661972, "grad_norm": 0.5062025189399719, "learning_rate": 1.9146e-05, "loss": 0.0517, "step": 6385 }, { "epoch": 35.87887323943662, "grad_norm": 0.42089125514030457, "learning_rate": 1.9149e-05, "loss": 0.0126, "step": 6386 }, { "epoch": 35.884507042253524, "grad_norm": 0.5884944796562195, "learning_rate": 1.9152e-05, "loss": 0.0614, "step": 6387 }, { "epoch": 35.89014084507042, "grad_norm": 0.4646969139575958, "learning_rate": 1.9155e-05, "loss": 0.0218, "step": 6388 }, { "epoch": 35.89577464788732, "grad_norm": 0.5123420357704163, "learning_rate": 1.9158e-05, "loss": 0.0281, "step": 6389 }, { "epoch": 35.901408450704224, "grad_norm": 0.37423408031463623, "learning_rate": 1.9161000000000002e-05, "loss": 0.0207, "step": 6390 }, { "epoch": 35.907042253521126, "grad_norm": 0.53504478931427, "learning_rate": 1.9164e-05, "loss": 0.0262, "step": 6391 }, { "epoch": 35.91267605633803, "grad_norm": 0.6429203748703003, "learning_rate": 1.9167e-05, "loss": 0.0552, "step": 6392 }, { "epoch": 35.91830985915493, "grad_norm": 0.48035523295402527, "learning_rate": 1.917e-05, "loss": 0.0285, "step": 6393 }, { "epoch": 35.92394366197183, "grad_norm": 0.39335721731185913, "learning_rate": 1.9173e-05, "loss": 0.0114, "step": 6394 }, { "epoch": 35.929577464788736, "grad_norm": 0.6425526142120361, "learning_rate": 1.9176e-05, "loss": 0.0286, "step": 6395 }, { "epoch": 35.93521126760563, "grad_norm": 0.6734095215797424, "learning_rate": 1.9179e-05, "loss": 0.0212, "step": 6396 }, { "epoch": 35.94084507042253, "grad_norm": 0.35762253403663635, "learning_rate": 1.9182e-05, "loss": 0.0352, "step": 6397 }, { "epoch": 
35.946478873239435, "grad_norm": 0.4970749318599701, "learning_rate": 1.9185e-05, "loss": 0.0152, "step": 6398 }, { "epoch": 35.95211267605634, "grad_norm": 0.5658286213874817, "learning_rate": 1.9188e-05, "loss": 0.0214, "step": 6399 }, { "epoch": 35.95774647887324, "grad_norm": 0.3260786533355713, "learning_rate": 1.9191000000000003e-05, "loss": 0.0098, "step": 6400 }, { "epoch": 35.96338028169014, "grad_norm": 0.5064437389373779, "learning_rate": 1.9194000000000003e-05, "loss": 0.0248, "step": 6401 }, { "epoch": 35.969014084507045, "grad_norm": 0.5487016439437866, "learning_rate": 1.9197000000000003e-05, "loss": 0.0331, "step": 6402 }, { "epoch": 35.97464788732395, "grad_norm": 0.44836851954460144, "learning_rate": 1.9200000000000003e-05, "loss": 0.0058, "step": 6403 }, { "epoch": 35.98028169014084, "grad_norm": 0.496773898601532, "learning_rate": 1.9203e-05, "loss": 0.0101, "step": 6404 }, { "epoch": 35.985915492957744, "grad_norm": 0.5388875603675842, "learning_rate": 1.9206e-05, "loss": 0.0265, "step": 6405 }, { "epoch": 35.99154929577465, "grad_norm": 0.4907063841819763, "learning_rate": 1.9209e-05, "loss": 0.0178, "step": 6406 }, { "epoch": 35.99718309859155, "grad_norm": 0.47567805647850037, "learning_rate": 1.9212e-05, "loss": 0.0394, "step": 6407 }, { "epoch": 36.0, "grad_norm": 0.3910619616508484, "learning_rate": 1.9214999999999998e-05, "loss": 0.0047, "step": 6408 }, { "epoch": 36.0056338028169, "grad_norm": 0.613671064376831, "learning_rate": 1.9217999999999998e-05, "loss": 0.069, "step": 6409 }, { "epoch": 36.011267605633805, "grad_norm": 0.48231780529022217, "learning_rate": 1.9221e-05, "loss": 0.0684, "step": 6410 }, { "epoch": 36.01690140845071, "grad_norm": 0.531242847442627, "learning_rate": 1.9224e-05, "loss": 0.0601, "step": 6411 }, { "epoch": 36.02253521126761, "grad_norm": 0.5043355822563171, "learning_rate": 1.9227e-05, "loss": 0.0758, "step": 6412 }, { "epoch": 36.028169014084504, "grad_norm": 0.458281546831131, "learning_rate": 
1.923e-05, "loss": 0.0867, "step": 6413 }, { "epoch": 36.03380281690141, "grad_norm": 0.5578661561012268, "learning_rate": 1.9233e-05, "loss": 0.0923, "step": 6414 }, { "epoch": 36.03943661971831, "grad_norm": 0.5183669924736023, "learning_rate": 1.9236e-05, "loss": 0.0552, "step": 6415 }, { "epoch": 36.04507042253521, "grad_norm": 0.5858542919158936, "learning_rate": 1.9239e-05, "loss": 0.0538, "step": 6416 }, { "epoch": 36.05070422535211, "grad_norm": 0.5886787176132202, "learning_rate": 1.9242e-05, "loss": 0.1355, "step": 6417 }, { "epoch": 36.056338028169016, "grad_norm": 0.48820826411247253, "learning_rate": 1.9245e-05, "loss": 0.047, "step": 6418 }, { "epoch": 36.06197183098592, "grad_norm": 0.6349949836730957, "learning_rate": 1.9248e-05, "loss": 0.0837, "step": 6419 }, { "epoch": 36.06760563380282, "grad_norm": 0.39194297790527344, "learning_rate": 1.9251000000000003e-05, "loss": 0.0346, "step": 6420 }, { "epoch": 36.073239436619716, "grad_norm": 0.7159105539321899, "learning_rate": 1.9254000000000002e-05, "loss": 0.0457, "step": 6421 }, { "epoch": 36.07887323943662, "grad_norm": 0.37575262784957886, "learning_rate": 1.9257000000000002e-05, "loss": 0.045, "step": 6422 }, { "epoch": 36.08450704225352, "grad_norm": 0.6587660312652588, "learning_rate": 1.9260000000000002e-05, "loss": 0.0277, "step": 6423 }, { "epoch": 36.09014084507042, "grad_norm": 0.5482978224754333, "learning_rate": 1.9263000000000002e-05, "loss": 0.1048, "step": 6424 }, { "epoch": 36.095774647887325, "grad_norm": 0.3677675127983093, "learning_rate": 1.9266e-05, "loss": 0.0315, "step": 6425 }, { "epoch": 36.10140845070423, "grad_norm": 0.2995437979698181, "learning_rate": 1.9269e-05, "loss": 0.0205, "step": 6426 }, { "epoch": 36.10704225352113, "grad_norm": 0.8793317675590515, "learning_rate": 1.9272e-05, "loss": 0.0405, "step": 6427 }, { "epoch": 36.11267605633803, "grad_norm": 0.5597925186157227, "learning_rate": 1.9275e-05, "loss": 0.0269, "step": 6428 }, { "epoch": 36.11830985915493, 
"grad_norm": 0.501958429813385, "learning_rate": 1.9277999999999997e-05, "loss": 0.0304, "step": 6429 }, { "epoch": 36.12394366197183, "grad_norm": 0.38171496987342834, "learning_rate": 1.9281e-05, "loss": 0.0186, "step": 6430 }, { "epoch": 36.12957746478873, "grad_norm": 0.4390154480934143, "learning_rate": 1.9284e-05, "loss": 0.0161, "step": 6431 }, { "epoch": 36.135211267605634, "grad_norm": 0.4183507561683655, "learning_rate": 1.9287e-05, "loss": 0.0323, "step": 6432 }, { "epoch": 36.140845070422536, "grad_norm": 0.505552351474762, "learning_rate": 1.929e-05, "loss": 0.0326, "step": 6433 }, { "epoch": 36.14647887323944, "grad_norm": 0.5285773873329163, "learning_rate": 1.9293e-05, "loss": 0.0262, "step": 6434 }, { "epoch": 36.15211267605634, "grad_norm": 0.3587162494659424, "learning_rate": 1.9296e-05, "loss": 0.0133, "step": 6435 }, { "epoch": 36.15774647887324, "grad_norm": 0.3302503228187561, "learning_rate": 1.9299e-05, "loss": 0.0162, "step": 6436 }, { "epoch": 36.16338028169014, "grad_norm": 0.37287384271621704, "learning_rate": 1.9302e-05, "loss": 0.0136, "step": 6437 }, { "epoch": 36.16901408450704, "grad_norm": 0.39075687527656555, "learning_rate": 1.9305e-05, "loss": 0.0234, "step": 6438 }, { "epoch": 36.17464788732394, "grad_norm": 0.35486143827438354, "learning_rate": 1.9308e-05, "loss": 0.0092, "step": 6439 }, { "epoch": 36.180281690140845, "grad_norm": 0.6902173757553101, "learning_rate": 1.9311000000000002e-05, "loss": 0.0311, "step": 6440 }, { "epoch": 36.18591549295775, "grad_norm": 0.6819069385528564, "learning_rate": 1.9314000000000002e-05, "loss": 0.0338, "step": 6441 }, { "epoch": 36.19154929577465, "grad_norm": 0.3447396159172058, "learning_rate": 1.9317e-05, "loss": 0.0653, "step": 6442 }, { "epoch": 36.19718309859155, "grad_norm": 0.6431465148925781, "learning_rate": 1.932e-05, "loss": 0.0332, "step": 6443 }, { "epoch": 36.202816901408454, "grad_norm": 0.42428621649742126, "learning_rate": 1.9323e-05, "loss": 0.0161, "step": 6444 }, { 
"epoch": 36.20845070422535, "grad_norm": 0.4569056034088135, "learning_rate": 1.9326e-05, "loss": 0.0123, "step": 6445 }, { "epoch": 36.21408450704225, "grad_norm": 0.4113379716873169, "learning_rate": 1.9329e-05, "loss": 0.0133, "step": 6446 }, { "epoch": 36.219718309859154, "grad_norm": 0.7627661228179932, "learning_rate": 1.9332e-05, "loss": 0.0394, "step": 6447 }, { "epoch": 36.225352112676056, "grad_norm": 0.42209726572036743, "learning_rate": 1.9335e-05, "loss": 0.0238, "step": 6448 }, { "epoch": 36.23098591549296, "grad_norm": 0.24060913920402527, "learning_rate": 1.9338e-05, "loss": 0.0054, "step": 6449 }, { "epoch": 36.23661971830986, "grad_norm": 0.952965497970581, "learning_rate": 1.9341000000000003e-05, "loss": 0.0451, "step": 6450 }, { "epoch": 36.24225352112676, "grad_norm": 0.5637022256851196, "learning_rate": 1.9344000000000003e-05, "loss": 0.014, "step": 6451 }, { "epoch": 36.247887323943665, "grad_norm": 0.7011445164680481, "learning_rate": 1.9347000000000003e-05, "loss": 0.0252, "step": 6452 }, { "epoch": 36.25352112676056, "grad_norm": 0.7925795912742615, "learning_rate": 1.935e-05, "loss": 0.1734, "step": 6453 }, { "epoch": 36.25915492957746, "grad_norm": 0.8412101864814758, "learning_rate": 1.9353e-05, "loss": 0.0913, "step": 6454 }, { "epoch": 36.264788732394365, "grad_norm": 0.5987395644187927, "learning_rate": 1.9356e-05, "loss": 0.0887, "step": 6455 }, { "epoch": 36.27042253521127, "grad_norm": 0.46663257479667664, "learning_rate": 1.9359e-05, "loss": 0.0671, "step": 6456 }, { "epoch": 36.27605633802817, "grad_norm": 0.7351412773132324, "learning_rate": 1.9362e-05, "loss": 0.0914, "step": 6457 }, { "epoch": 36.28169014084507, "grad_norm": 0.6256861686706543, "learning_rate": 1.9365e-05, "loss": 0.0933, "step": 6458 }, { "epoch": 36.287323943661974, "grad_norm": 0.532701313495636, "learning_rate": 1.9367999999999998e-05, "loss": 0.071, "step": 6459 }, { "epoch": 36.292957746478876, "grad_norm": 0.610520601272583, "learning_rate": 
1.9371e-05, "loss": 0.0686, "step": 6460 }, { "epoch": 36.29859154929577, "grad_norm": 0.48294171690940857, "learning_rate": 1.9374e-05, "loss": 0.0398, "step": 6461 }, { "epoch": 36.304225352112674, "grad_norm": 0.5359922647476196, "learning_rate": 1.9377e-05, "loss": 0.0481, "step": 6462 }, { "epoch": 36.309859154929576, "grad_norm": 0.6712197065353394, "learning_rate": 1.938e-05, "loss": 0.0546, "step": 6463 }, { "epoch": 36.31549295774648, "grad_norm": 0.616208016872406, "learning_rate": 1.9383e-05, "loss": 0.046, "step": 6464 }, { "epoch": 36.32112676056338, "grad_norm": 0.4717561900615692, "learning_rate": 1.9386e-05, "loss": 0.046, "step": 6465 }, { "epoch": 36.32676056338028, "grad_norm": 0.5239540934562683, "learning_rate": 1.9389e-05, "loss": 0.042, "step": 6466 }, { "epoch": 36.332394366197185, "grad_norm": 0.5416713953018188, "learning_rate": 1.9392e-05, "loss": 0.032, "step": 6467 }, { "epoch": 36.33802816901409, "grad_norm": 0.8146408200263977, "learning_rate": 1.9395e-05, "loss": 0.1156, "step": 6468 }, { "epoch": 36.34366197183098, "grad_norm": 0.8470180034637451, "learning_rate": 1.9398e-05, "loss": 0.0506, "step": 6469 }, { "epoch": 36.349295774647885, "grad_norm": 0.5079064965248108, "learning_rate": 1.9401000000000003e-05, "loss": 0.0279, "step": 6470 }, { "epoch": 36.35492957746479, "grad_norm": 0.451831191778183, "learning_rate": 1.9404000000000003e-05, "loss": 0.0216, "step": 6471 }, { "epoch": 36.36056338028169, "grad_norm": 0.36586496233940125, "learning_rate": 1.9407000000000002e-05, "loss": 0.0198, "step": 6472 }, { "epoch": 36.36619718309859, "grad_norm": 0.5107755064964294, "learning_rate": 1.9410000000000002e-05, "loss": 0.0585, "step": 6473 }, { "epoch": 36.371830985915494, "grad_norm": 0.5502313375473022, "learning_rate": 1.9413000000000002e-05, "loss": 0.0324, "step": 6474 }, { "epoch": 36.3774647887324, "grad_norm": 0.6756883859634399, "learning_rate": 1.9416000000000002e-05, "loss": 0.0531, "step": 6475 }, { "epoch": 
36.3830985915493, "grad_norm": 0.4642013609409332, "learning_rate": 1.9419e-05, "loss": 0.0168, "step": 6476 }, { "epoch": 36.388732394366194, "grad_norm": 0.826207160949707, "learning_rate": 1.9422e-05, "loss": 0.0783, "step": 6477 }, { "epoch": 36.394366197183096, "grad_norm": 0.6125365495681763, "learning_rate": 1.9424999999999998e-05, "loss": 0.0251, "step": 6478 }, { "epoch": 36.4, "grad_norm": 0.994480550289154, "learning_rate": 1.9427999999999998e-05, "loss": 0.0913, "step": 6479 }, { "epoch": 36.4056338028169, "grad_norm": 0.44094783067703247, "learning_rate": 1.9431e-05, "loss": 0.0265, "step": 6480 }, { "epoch": 36.4112676056338, "grad_norm": 0.5770249366760254, "learning_rate": 1.9434e-05, "loss": 0.0304, "step": 6481 }, { "epoch": 36.416901408450705, "grad_norm": 0.5327168107032776, "learning_rate": 1.9437e-05, "loss": 0.0404, "step": 6482 }, { "epoch": 36.42253521126761, "grad_norm": 0.5448128581047058, "learning_rate": 1.944e-05, "loss": 0.0406, "step": 6483 }, { "epoch": 36.42816901408451, "grad_norm": 0.5047175884246826, "learning_rate": 1.9443e-05, "loss": 0.0319, "step": 6484 }, { "epoch": 36.433802816901405, "grad_norm": 0.5711403489112854, "learning_rate": 1.9446e-05, "loss": 0.0519, "step": 6485 }, { "epoch": 36.43943661971831, "grad_norm": 1.4626579284667969, "learning_rate": 1.9449e-05, "loss": 0.059, "step": 6486 }, { "epoch": 36.44507042253521, "grad_norm": 0.5614196062088013, "learning_rate": 1.9452e-05, "loss": 0.0163, "step": 6487 }, { "epoch": 36.45070422535211, "grad_norm": 0.48988577723503113, "learning_rate": 1.9455e-05, "loss": 0.0291, "step": 6488 }, { "epoch": 36.456338028169014, "grad_norm": 0.45402973890304565, "learning_rate": 1.9458e-05, "loss": 0.0137, "step": 6489 }, { "epoch": 36.46197183098592, "grad_norm": 0.370618999004364, "learning_rate": 1.9461000000000002e-05, "loss": 0.0403, "step": 6490 }, { "epoch": 36.46760563380282, "grad_norm": 0.3172646462917328, "learning_rate": 1.9464000000000002e-05, "loss": 0.0109, "step": 
6491 }, { "epoch": 36.47323943661972, "grad_norm": 0.4391406178474426, "learning_rate": 1.9467000000000002e-05, "loss": 0.0443, "step": 6492 }, { "epoch": 36.478873239436616, "grad_norm": 0.3194355368614197, "learning_rate": 1.947e-05, "loss": 0.0166, "step": 6493 }, { "epoch": 36.48450704225352, "grad_norm": 0.7170020937919617, "learning_rate": 1.9473e-05, "loss": 0.0405, "step": 6494 }, { "epoch": 36.49014084507042, "grad_norm": 0.657343864440918, "learning_rate": 1.9476e-05, "loss": 0.0688, "step": 6495 }, { "epoch": 36.49577464788732, "grad_norm": 0.8646242022514343, "learning_rate": 1.9479e-05, "loss": 0.0394, "step": 6496 }, { "epoch": 36.501408450704226, "grad_norm": 0.7627384662628174, "learning_rate": 1.9482e-05, "loss": 0.1879, "step": 6497 }, { "epoch": 36.50704225352113, "grad_norm": 1.590726375579834, "learning_rate": 1.9485e-05, "loss": 0.1015, "step": 6498 }, { "epoch": 36.51267605633803, "grad_norm": 0.6553433537483215, "learning_rate": 1.9488e-05, "loss": 0.0775, "step": 6499 }, { "epoch": 36.51830985915493, "grad_norm": 0.652147650718689, "learning_rate": 1.9491000000000004e-05, "loss": 0.0944, "step": 6500 }, { "epoch": 36.52394366197183, "grad_norm": 0.6070206165313721, "learning_rate": 1.9494000000000003e-05, "loss": 0.0721, "step": 6501 }, { "epoch": 36.52957746478873, "grad_norm": 0.6174696683883667, "learning_rate": 1.9497e-05, "loss": 0.0622, "step": 6502 }, { "epoch": 36.53521126760563, "grad_norm": 0.590227484703064, "learning_rate": 1.95e-05, "loss": 0.0785, "step": 6503 }, { "epoch": 36.540845070422534, "grad_norm": 0.7460767030715942, "learning_rate": 1.9503e-05, "loss": 0.0714, "step": 6504 }, { "epoch": 36.54647887323944, "grad_norm": 0.46003609895706177, "learning_rate": 1.9506e-05, "loss": 0.0404, "step": 6505 }, { "epoch": 36.55211267605634, "grad_norm": 0.7416086196899414, "learning_rate": 1.9509e-05, "loss": 0.042, "step": 6506 }, { "epoch": 36.55774647887324, "grad_norm": 0.5567677021026611, "learning_rate": 1.9512e-05, "loss": 
0.0731, "step": 6507 }, { "epoch": 36.563380281690144, "grad_norm": 0.8451827764511108, "learning_rate": 1.9515e-05, "loss": 0.0504, "step": 6508 }, { "epoch": 36.56901408450704, "grad_norm": 0.5418282747268677, "learning_rate": 1.9518e-05, "loss": 0.1142, "step": 6509 }, { "epoch": 36.57464788732394, "grad_norm": 0.5903849005699158, "learning_rate": 1.9520999999999998e-05, "loss": 0.0799, "step": 6510 }, { "epoch": 36.58028169014084, "grad_norm": 0.865992546081543, "learning_rate": 1.9524e-05, "loss": 0.0401, "step": 6511 }, { "epoch": 36.585915492957746, "grad_norm": 0.5943751931190491, "learning_rate": 1.9527e-05, "loss": 0.0349, "step": 6512 }, { "epoch": 36.59154929577465, "grad_norm": 0.3739730715751648, "learning_rate": 1.953e-05, "loss": 0.0264, "step": 6513 }, { "epoch": 36.59718309859155, "grad_norm": 0.48203402757644653, "learning_rate": 1.9533e-05, "loss": 0.0528, "step": 6514 }, { "epoch": 36.60281690140845, "grad_norm": 0.5376028418540955, "learning_rate": 1.9536e-05, "loss": 0.0217, "step": 6515 }, { "epoch": 36.608450704225355, "grad_norm": 0.6289936900138855, "learning_rate": 1.9539e-05, "loss": 0.0318, "step": 6516 }, { "epoch": 36.61408450704225, "grad_norm": 0.5579929351806641, "learning_rate": 1.9542e-05, "loss": 0.0587, "step": 6517 }, { "epoch": 36.61971830985915, "grad_norm": 0.5084307193756104, "learning_rate": 1.9545e-05, "loss": 0.0374, "step": 6518 }, { "epoch": 36.625352112676055, "grad_norm": 0.34254348278045654, "learning_rate": 1.9548e-05, "loss": 0.0224, "step": 6519 }, { "epoch": 36.63098591549296, "grad_norm": 0.41878747940063477, "learning_rate": 1.9551e-05, "loss": 0.0203, "step": 6520 }, { "epoch": 36.63661971830986, "grad_norm": 0.6721781492233276, "learning_rate": 1.9554000000000003e-05, "loss": 0.0285, "step": 6521 }, { "epoch": 36.64225352112676, "grad_norm": 0.5031753182411194, "learning_rate": 1.9557000000000003e-05, "loss": 0.0253, "step": 6522 }, { "epoch": 36.647887323943664, "grad_norm": 0.6247519254684448, 
"learning_rate": 1.9560000000000002e-05, "loss": 0.0296, "step": 6523 }, { "epoch": 36.653521126760566, "grad_norm": 0.48034247756004333, "learning_rate": 1.9563000000000002e-05, "loss": 0.0177, "step": 6524 }, { "epoch": 36.65915492957747, "grad_norm": 0.6475138068199158, "learning_rate": 1.9566000000000002e-05, "loss": 0.033, "step": 6525 }, { "epoch": 36.66478873239436, "grad_norm": 0.5890708565711975, "learning_rate": 1.9569000000000002e-05, "loss": 0.028, "step": 6526 }, { "epoch": 36.670422535211266, "grad_norm": 0.36329716444015503, "learning_rate": 1.9571999999999998e-05, "loss": 0.0233, "step": 6527 }, { "epoch": 36.67605633802817, "grad_norm": 0.41638800501823425, "learning_rate": 1.9574999999999998e-05, "loss": 0.0209, "step": 6528 }, { "epoch": 36.68169014084507, "grad_norm": 0.898169219493866, "learning_rate": 1.9577999999999998e-05, "loss": 0.0511, "step": 6529 }, { "epoch": 36.68732394366197, "grad_norm": 0.4995548725128174, "learning_rate": 1.9580999999999998e-05, "loss": 0.031, "step": 6530 }, { "epoch": 36.692957746478875, "grad_norm": 0.5080646872520447, "learning_rate": 1.9584e-05, "loss": 0.0352, "step": 6531 }, { "epoch": 36.69859154929578, "grad_norm": 0.4895935654640198, "learning_rate": 1.9587e-05, "loss": 0.028, "step": 6532 }, { "epoch": 36.70422535211267, "grad_norm": 0.7282553315162659, "learning_rate": 1.959e-05, "loss": 0.0488, "step": 6533 }, { "epoch": 36.709859154929575, "grad_norm": 0.2965199649333954, "learning_rate": 1.9593e-05, "loss": 0.0104, "step": 6534 }, { "epoch": 36.71549295774648, "grad_norm": 0.34920480847358704, "learning_rate": 1.9596e-05, "loss": 0.0111, "step": 6535 }, { "epoch": 36.72112676056338, "grad_norm": 0.816827654838562, "learning_rate": 1.9599e-05, "loss": 0.0306, "step": 6536 }, { "epoch": 36.72676056338028, "grad_norm": 0.35633137822151184, "learning_rate": 1.9602e-05, "loss": 0.0107, "step": 6537 }, { "epoch": 36.732394366197184, "grad_norm": 0.9389570355415344, "learning_rate": 1.9605e-05, "loss": 
0.0343, "step": 6538 }, { "epoch": 36.738028169014086, "grad_norm": 0.40978357195854187, "learning_rate": 1.9608e-05, "loss": 0.0056, "step": 6539 }, { "epoch": 36.74366197183099, "grad_norm": 0.521361768245697, "learning_rate": 1.9611e-05, "loss": 0.0245, "step": 6540 }, { "epoch": 36.74929577464789, "grad_norm": 0.5914299488067627, "learning_rate": 1.9614000000000002e-05, "loss": 0.1274, "step": 6541 }, { "epoch": 36.754929577464786, "grad_norm": 0.5633407235145569, "learning_rate": 1.9617000000000002e-05, "loss": 0.0882, "step": 6542 }, { "epoch": 36.76056338028169, "grad_norm": 0.557871401309967, "learning_rate": 1.9620000000000002e-05, "loss": 0.0923, "step": 6543 }, { "epoch": 36.76619718309859, "grad_norm": 0.49170055985450745, "learning_rate": 1.9623e-05, "loss": 0.1081, "step": 6544 }, { "epoch": 36.77183098591549, "grad_norm": 0.4588392674922943, "learning_rate": 1.9626e-05, "loss": 0.1093, "step": 6545 }, { "epoch": 36.777464788732395, "grad_norm": 0.4536442756652832, "learning_rate": 1.9629e-05, "loss": 0.063, "step": 6546 }, { "epoch": 36.7830985915493, "grad_norm": 0.5171947479248047, "learning_rate": 1.9632e-05, "loss": 0.0741, "step": 6547 }, { "epoch": 36.7887323943662, "grad_norm": 0.49034810066223145, "learning_rate": 1.9635e-05, "loss": 0.0965, "step": 6548 }, { "epoch": 36.7943661971831, "grad_norm": 1.499422550201416, "learning_rate": 1.9638e-05, "loss": 0.066, "step": 6549 }, { "epoch": 36.8, "grad_norm": 0.4111042618751526, "learning_rate": 1.9641e-05, "loss": 0.0533, "step": 6550 }, { "epoch": 36.8056338028169, "grad_norm": 0.6004112362861633, "learning_rate": 1.9644e-05, "loss": 0.0681, "step": 6551 }, { "epoch": 36.8112676056338, "grad_norm": 0.5285236239433289, "learning_rate": 1.9647e-05, "loss": 0.0379, "step": 6552 }, { "epoch": 36.816901408450704, "grad_norm": 0.6419047117233276, "learning_rate": 1.965e-05, "loss": 0.0414, "step": 6553 }, { "epoch": 36.822535211267606, "grad_norm": 0.6848231554031372, "learning_rate": 1.9653e-05, 
"loss": 0.1109, "step": 6554 }, { "epoch": 36.82816901408451, "grad_norm": 0.46841198205947876, "learning_rate": 1.9656e-05, "loss": 0.0245, "step": 6555 }, { "epoch": 36.83380281690141, "grad_norm": 1.7216283082962036, "learning_rate": 1.9659e-05, "loss": 0.0502, "step": 6556 }, { "epoch": 36.83943661971831, "grad_norm": 0.6493949294090271, "learning_rate": 1.9662e-05, "loss": 0.0271, "step": 6557 }, { "epoch": 36.84507042253521, "grad_norm": 0.6937504410743713, "learning_rate": 1.9665e-05, "loss": 0.0515, "step": 6558 }, { "epoch": 36.85070422535211, "grad_norm": 0.5990607142448425, "learning_rate": 1.9668e-05, "loss": 0.0509, "step": 6559 }, { "epoch": 36.85633802816901, "grad_norm": 0.3977926969528198, "learning_rate": 1.9671e-05, "loss": 0.0181, "step": 6560 }, { "epoch": 36.861971830985915, "grad_norm": 0.5182214379310608, "learning_rate": 1.9674000000000002e-05, "loss": 0.0368, "step": 6561 }, { "epoch": 36.86760563380282, "grad_norm": 0.6894580125808716, "learning_rate": 1.9677e-05, "loss": 0.0283, "step": 6562 }, { "epoch": 36.87323943661972, "grad_norm": 1.2089155912399292, "learning_rate": 1.968e-05, "loss": 0.0324, "step": 6563 }, { "epoch": 36.87887323943662, "grad_norm": 0.6826744675636292, "learning_rate": 1.9683e-05, "loss": 0.0761, "step": 6564 }, { "epoch": 36.884507042253524, "grad_norm": 0.5875285863876343, "learning_rate": 1.9686e-05, "loss": 0.0167, "step": 6565 }, { "epoch": 36.89014084507042, "grad_norm": 0.3168661594390869, "learning_rate": 1.9689e-05, "loss": 0.0135, "step": 6566 }, { "epoch": 36.89577464788732, "grad_norm": 0.41377338767051697, "learning_rate": 1.9692e-05, "loss": 0.0127, "step": 6567 }, { "epoch": 36.901408450704224, "grad_norm": 0.46450644731521606, "learning_rate": 1.9695e-05, "loss": 0.0179, "step": 6568 }, { "epoch": 36.907042253521126, "grad_norm": 0.4941331744194031, "learning_rate": 1.9698e-05, "loss": 0.0179, "step": 6569 }, { "epoch": 36.91267605633803, "grad_norm": 0.5497116446495056, "learning_rate": 
1.9701e-05, "loss": 0.0187, "step": 6570 }, { "epoch": 36.91830985915493, "grad_norm": 0.4914342761039734, "learning_rate": 1.9704000000000003e-05, "loss": 0.0396, "step": 6571 }, { "epoch": 36.92394366197183, "grad_norm": 0.47178441286087036, "learning_rate": 1.9707000000000003e-05, "loss": 0.0176, "step": 6572 }, { "epoch": 36.929577464788736, "grad_norm": 0.5780442357063293, "learning_rate": 1.9710000000000003e-05, "loss": 0.0562, "step": 6573 }, { "epoch": 36.93521126760563, "grad_norm": 0.3219510614871979, "learning_rate": 1.9713000000000003e-05, "loss": 0.0218, "step": 6574 }, { "epoch": 36.94084507042253, "grad_norm": 0.35696694254875183, "learning_rate": 1.9716000000000002e-05, "loss": 0.0124, "step": 6575 }, { "epoch": 36.946478873239435, "grad_norm": 0.604103684425354, "learning_rate": 1.9719e-05, "loss": 0.0293, "step": 6576 }, { "epoch": 36.95211267605634, "grad_norm": 1.1942545175552368, "learning_rate": 1.9722e-05, "loss": 0.0398, "step": 6577 }, { "epoch": 36.95774647887324, "grad_norm": 0.5000042915344238, "learning_rate": 1.9725e-05, "loss": 0.0301, "step": 6578 }, { "epoch": 36.96338028169014, "grad_norm": 0.338764488697052, "learning_rate": 1.9727999999999998e-05, "loss": 0.0188, "step": 6579 }, { "epoch": 36.969014084507045, "grad_norm": 0.43519094586372375, "learning_rate": 1.9730999999999998e-05, "loss": 0.04, "step": 6580 }, { "epoch": 36.97464788732395, "grad_norm": 0.6621068120002747, "learning_rate": 1.9734e-05, "loss": 0.0112, "step": 6581 }, { "epoch": 36.98028169014084, "grad_norm": 0.5478929877281189, "learning_rate": 1.9737e-05, "loss": 0.0206, "step": 6582 }, { "epoch": 36.985915492957744, "grad_norm": 0.3546035587787628, "learning_rate": 1.974e-05, "loss": 0.0079, "step": 6583 }, { "epoch": 36.99154929577465, "grad_norm": 0.7360183596611023, "learning_rate": 1.9743e-05, "loss": 0.0114, "step": 6584 }, { "epoch": 36.99718309859155, "grad_norm": 0.8624211549758911, "learning_rate": 1.9746e-05, "loss": 0.0482, "step": 6585 }, { 
"epoch": 37.0, "grad_norm": 1.3395333290100098, "learning_rate": 1.9749e-05, "loss": 0.0104, "step": 6586 }, { "epoch": 37.0056338028169, "grad_norm": 0.6296811699867249, "learning_rate": 1.9752e-05, "loss": 0.0787, "step": 6587 }, { "epoch": 37.011267605633805, "grad_norm": 0.5340318083763123, "learning_rate": 1.9755e-05, "loss": 0.081, "step": 6588 }, { "epoch": 37.01690140845071, "grad_norm": 1.1424528360366821, "learning_rate": 1.9758e-05, "loss": 0.092, "step": 6589 }, { "epoch": 37.02253521126761, "grad_norm": 0.5526189208030701, "learning_rate": 1.9761e-05, "loss": 0.0806, "step": 6590 }, { "epoch": 37.028169014084504, "grad_norm": 0.5099465250968933, "learning_rate": 1.9764000000000003e-05, "loss": 0.0771, "step": 6591 }, { "epoch": 37.03380281690141, "grad_norm": 0.6083606481552124, "learning_rate": 1.9767000000000002e-05, "loss": 0.0558, "step": 6592 }, { "epoch": 37.03943661971831, "grad_norm": 0.4032229483127594, "learning_rate": 1.9770000000000002e-05, "loss": 0.067, "step": 6593 }, { "epoch": 37.04507042253521, "grad_norm": 0.5628703832626343, "learning_rate": 1.9773000000000002e-05, "loss": 0.0474, "step": 6594 }, { "epoch": 37.05070422535211, "grad_norm": 0.5671467781066895, "learning_rate": 1.9776000000000002e-05, "loss": 0.0722, "step": 6595 }, { "epoch": 37.056338028169016, "grad_norm": 0.5441295504570007, "learning_rate": 1.9779e-05, "loss": 0.0419, "step": 6596 }, { "epoch": 37.06197183098592, "grad_norm": 0.6422341465950012, "learning_rate": 1.9782e-05, "loss": 0.0565, "step": 6597 }, { "epoch": 37.06760563380282, "grad_norm": 1.015497088432312, "learning_rate": 1.9785e-05, "loss": 0.0737, "step": 6598 }, { "epoch": 37.073239436619716, "grad_norm": 0.40871334075927734, "learning_rate": 1.9788e-05, "loss": 0.0498, "step": 6599 }, { "epoch": 37.07887323943662, "grad_norm": 0.6148958802223206, "learning_rate": 1.9791e-05, "loss": 0.0812, "step": 6600 }, { "epoch": 37.08450704225352, "grad_norm": 0.42529529333114624, "learning_rate": 1.9794e-05, 
"loss": 0.0313, "step": 6601 }, { "epoch": 37.09014084507042, "grad_norm": 0.658881425857544, "learning_rate": 1.9797e-05, "loss": 0.0366, "step": 6602 }, { "epoch": 37.095774647887325, "grad_norm": 0.6145992875099182, "learning_rate": 1.98e-05, "loss": 0.0576, "step": 6603 }, { "epoch": 37.10140845070423, "grad_norm": 0.565244734287262, "learning_rate": 1.9803e-05, "loss": 0.0243, "step": 6604 }, { "epoch": 37.10704225352113, "grad_norm": 0.4724868834018707, "learning_rate": 1.9806e-05, "loss": 0.0486, "step": 6605 }, { "epoch": 37.11267605633803, "grad_norm": 0.7471914887428284, "learning_rate": 1.9809e-05, "loss": 0.0237, "step": 6606 }, { "epoch": 37.11830985915493, "grad_norm": 0.562654435634613, "learning_rate": 1.9812e-05, "loss": 0.0175, "step": 6607 }, { "epoch": 37.12394366197183, "grad_norm": 0.48671096563339233, "learning_rate": 1.9815e-05, "loss": 0.0347, "step": 6608 }, { "epoch": 37.12957746478873, "grad_norm": 0.5672674179077148, "learning_rate": 1.9818e-05, "loss": 0.026, "step": 6609 }, { "epoch": 37.135211267605634, "grad_norm": 0.42659029364585876, "learning_rate": 1.9821e-05, "loss": 0.0178, "step": 6610 }, { "epoch": 37.140845070422536, "grad_norm": 0.3345874547958374, "learning_rate": 1.9824000000000002e-05, "loss": 0.0208, "step": 6611 }, { "epoch": 37.14647887323944, "grad_norm": 0.48991313576698303, "learning_rate": 1.9827000000000002e-05, "loss": 0.0292, "step": 6612 }, { "epoch": 37.15211267605634, "grad_norm": 0.4738418161869049, "learning_rate": 1.983e-05, "loss": 0.0168, "step": 6613 }, { "epoch": 37.15774647887324, "grad_norm": 0.38414907455444336, "learning_rate": 1.9833e-05, "loss": 0.0154, "step": 6614 }, { "epoch": 37.16338028169014, "grad_norm": 0.5199055671691895, "learning_rate": 1.9836e-05, "loss": 0.0169, "step": 6615 }, { "epoch": 37.16901408450704, "grad_norm": 0.6027151346206665, "learning_rate": 1.9839e-05, "loss": 0.0599, "step": 6616 }, { "epoch": 37.17464788732394, "grad_norm": 0.5507667064666748, "learning_rate": 
1.9842e-05, "loss": 0.0357, "step": 6617 }, { "epoch": 37.180281690140845, "grad_norm": 0.3800807595252991, "learning_rate": 1.9845e-05, "loss": 0.0231, "step": 6618 }, { "epoch": 37.18591549295775, "grad_norm": 0.3116394877433777, "learning_rate": 1.9848e-05, "loss": 0.0193, "step": 6619 }, { "epoch": 37.19154929577465, "grad_norm": 0.7073714733123779, "learning_rate": 1.9851e-05, "loss": 0.0467, "step": 6620 }, { "epoch": 37.19718309859155, "grad_norm": 0.7172895669937134, "learning_rate": 1.9854000000000003e-05, "loss": 0.0517, "step": 6621 }, { "epoch": 37.202816901408454, "grad_norm": 0.3944377303123474, "learning_rate": 1.9857000000000003e-05, "loss": 0.0191, "step": 6622 }, { "epoch": 37.20845070422535, "grad_norm": 0.389423668384552, "learning_rate": 1.9860000000000003e-05, "loss": 0.0088, "step": 6623 }, { "epoch": 37.21408450704225, "grad_norm": 0.4244140088558197, "learning_rate": 1.9863000000000003e-05, "loss": 0.0328, "step": 6624 }, { "epoch": 37.219718309859154, "grad_norm": 0.554714560508728, "learning_rate": 1.9866e-05, "loss": 0.0115, "step": 6625 }, { "epoch": 37.225352112676056, "grad_norm": 0.5553799271583557, "learning_rate": 1.9869e-05, "loss": 0.0372, "step": 6626 }, { "epoch": 37.23098591549296, "grad_norm": 0.8386805653572083, "learning_rate": 1.9872e-05, "loss": 0.0149, "step": 6627 }, { "epoch": 37.23661971830986, "grad_norm": 0.3891904056072235, "learning_rate": 1.9875e-05, "loss": 0.0284, "step": 6628 }, { "epoch": 37.24225352112676, "grad_norm": 0.4008524715900421, "learning_rate": 1.9878e-05, "loss": 0.0292, "step": 6629 }, { "epoch": 37.247887323943665, "grad_norm": 0.50032639503479, "learning_rate": 1.9880999999999998e-05, "loss": 0.0168, "step": 6630 }, { "epoch": 37.25352112676056, "grad_norm": 0.9708119034767151, "learning_rate": 1.9883999999999998e-05, "loss": 0.1615, "step": 6631 }, { "epoch": 37.25915492957746, "grad_norm": 0.5522500276565552, "learning_rate": 1.9887e-05, "loss": 0.0898, "step": 6632 }, { "epoch": 
37.264788732394365, "grad_norm": 0.5567334890365601, "learning_rate": 1.989e-05, "loss": 0.0584, "step": 6633 }, { "epoch": 37.27042253521127, "grad_norm": 0.6415675282478333, "learning_rate": 1.9893e-05, "loss": 0.0955, "step": 6634 }, { "epoch": 37.27605633802817, "grad_norm": 0.5224911570549011, "learning_rate": 1.9896e-05, "loss": 0.0576, "step": 6635 }, { "epoch": 37.28169014084507, "grad_norm": 1.0853354930877686, "learning_rate": 1.9899e-05, "loss": 0.0924, "step": 6636 }, { "epoch": 37.287323943661974, "grad_norm": 0.4066997766494751, "learning_rate": 1.9902e-05, "loss": 0.0545, "step": 6637 }, { "epoch": 37.292957746478876, "grad_norm": 0.6657580137252808, "learning_rate": 1.9905e-05, "loss": 0.1095, "step": 6638 }, { "epoch": 37.29859154929577, "grad_norm": 0.6166088581085205, "learning_rate": 1.9908e-05, "loss": 0.0443, "step": 6639 }, { "epoch": 37.304225352112674, "grad_norm": 0.5871906280517578, "learning_rate": 1.9911e-05, "loss": 0.0468, "step": 6640 }, { "epoch": 37.309859154929576, "grad_norm": 0.4381653666496277, "learning_rate": 1.9914e-05, "loss": 0.0296, "step": 6641 }, { "epoch": 37.31549295774648, "grad_norm": 0.5414829254150391, "learning_rate": 1.9917000000000003e-05, "loss": 0.0297, "step": 6642 }, { "epoch": 37.32112676056338, "grad_norm": 0.5593733787536621, "learning_rate": 1.9920000000000002e-05, "loss": 0.0978, "step": 6643 }, { "epoch": 37.32676056338028, "grad_norm": 0.5262511968612671, "learning_rate": 1.9923000000000002e-05, "loss": 0.0359, "step": 6644 }, { "epoch": 37.332394366197185, "grad_norm": 0.557573676109314, "learning_rate": 1.9926000000000002e-05, "loss": 0.034, "step": 6645 }, { "epoch": 37.33802816901409, "grad_norm": 0.569508969783783, "learning_rate": 1.9929000000000002e-05, "loss": 0.0366, "step": 6646 }, { "epoch": 37.34366197183098, "grad_norm": 0.3861851096153259, "learning_rate": 1.9932e-05, "loss": 0.0199, "step": 6647 }, { "epoch": 37.349295774647885, "grad_norm": 0.7894451022148132, "learning_rate": 
1.9935e-05, "loss": 0.0629, "step": 6648 }, { "epoch": 37.35492957746479, "grad_norm": 0.6663050055503845, "learning_rate": 1.9938e-05, "loss": 0.0337, "step": 6649 }, { "epoch": 37.36056338028169, "grad_norm": 0.4150336980819702, "learning_rate": 1.9940999999999998e-05, "loss": 0.0345, "step": 6650 }, { "epoch": 37.36619718309859, "grad_norm": 0.40808337926864624, "learning_rate": 1.9943999999999997e-05, "loss": 0.0423, "step": 6651 }, { "epoch": 37.371830985915494, "grad_norm": 0.4650689959526062, "learning_rate": 1.9947e-05, "loss": 0.0353, "step": 6652 }, { "epoch": 37.3774647887324, "grad_norm": 0.46836698055267334, "learning_rate": 1.995e-05, "loss": 0.0239, "step": 6653 }, { "epoch": 37.3830985915493, "grad_norm": 0.40655678510665894, "learning_rate": 1.9953e-05, "loss": 0.0132, "step": 6654 }, { "epoch": 37.388732394366194, "grad_norm": 0.5247926712036133, "learning_rate": 1.9956e-05, "loss": 0.0317, "step": 6655 }, { "epoch": 37.394366197183096, "grad_norm": 0.3625401258468628, "learning_rate": 1.9959e-05, "loss": 0.0135, "step": 6656 }, { "epoch": 37.4, "grad_norm": 0.6007390022277832, "learning_rate": 1.9962e-05, "loss": 0.0673, "step": 6657 }, { "epoch": 37.4056338028169, "grad_norm": 0.5294614434242249, "learning_rate": 1.9965e-05, "loss": 0.0185, "step": 6658 }, { "epoch": 37.4112676056338, "grad_norm": 0.3338068425655365, "learning_rate": 1.9968e-05, "loss": 0.0164, "step": 6659 }, { "epoch": 37.416901408450705, "grad_norm": 0.6312756538391113, "learning_rate": 1.9971e-05, "loss": 0.0271, "step": 6660 }, { "epoch": 37.42253521126761, "grad_norm": 0.5010232925415039, "learning_rate": 1.9974e-05, "loss": 0.0128, "step": 6661 }, { "epoch": 37.42816901408451, "grad_norm": 0.5095540285110474, "learning_rate": 1.9977000000000002e-05, "loss": 0.0626, "step": 6662 }, { "epoch": 37.433802816901405, "grad_norm": 0.6516841053962708, "learning_rate": 1.9980000000000002e-05, "loss": 0.0315, "step": 6663 }, { "epoch": 37.43943661971831, "grad_norm": 
0.7715452313423157, "learning_rate": 1.9983e-05, "loss": 0.0123, "step": 6664 }, { "epoch": 37.44507042253521, "grad_norm": 0.6903049945831299, "learning_rate": 1.9986e-05, "loss": 0.0285, "step": 6665 }, { "epoch": 37.45070422535211, "grad_norm": 0.38027986884117126, "learning_rate": 1.9989e-05, "loss": 0.0226, "step": 6666 }, { "epoch": 37.456338028169014, "grad_norm": 0.6699754595756531, "learning_rate": 1.9992e-05, "loss": 0.0499, "step": 6667 }, { "epoch": 37.46197183098592, "grad_norm": 0.46880000829696655, "learning_rate": 1.9995e-05, "loss": 0.0144, "step": 6668 }, { "epoch": 37.46760563380282, "grad_norm": 0.513952374458313, "learning_rate": 1.9998e-05, "loss": 0.0213, "step": 6669 }, { "epoch": 37.47323943661972, "grad_norm": 0.41860079765319824, "learning_rate": 2.0001e-05, "loss": 0.038, "step": 6670 }, { "epoch": 37.478873239436616, "grad_norm": 0.26500871777534485, "learning_rate": 2.0004e-05, "loss": 0.0058, "step": 6671 }, { "epoch": 37.48450704225352, "grad_norm": 0.43941643834114075, "learning_rate": 2.0007000000000003e-05, "loss": 0.0112, "step": 6672 }, { "epoch": 37.49014084507042, "grad_norm": 0.7673001289367676, "learning_rate": 2.0010000000000003e-05, "loss": 0.0337, "step": 6673 }, { "epoch": 37.49577464788732, "grad_norm": 0.7755512595176697, "learning_rate": 2.0013e-05, "loss": 0.0361, "step": 6674 }, { "epoch": 37.501408450704226, "grad_norm": 0.8575171828269958, "learning_rate": 2.0016e-05, "loss": 0.1805, "step": 6675 }, { "epoch": 37.50704225352113, "grad_norm": 0.5414265394210815, "learning_rate": 2.0019e-05, "loss": 0.0592, "step": 6676 }, { "epoch": 37.51267605633803, "grad_norm": 0.5862269997596741, "learning_rate": 2.0022e-05, "loss": 0.0671, "step": 6677 }, { "epoch": 37.51830985915493, "grad_norm": 0.43087902665138245, "learning_rate": 2.0025e-05, "loss": 0.0738, "step": 6678 }, { "epoch": 37.52394366197183, "grad_norm": 0.7113614678382874, "learning_rate": 2.0028e-05, "loss": 0.0805, "step": 6679 }, { "epoch": 
37.52957746478873, "grad_norm": 0.5316802859306335, "learning_rate": 2.0031e-05, "loss": 0.0674, "step": 6680 }, { "epoch": 37.53521126760563, "grad_norm": 0.4913565516471863, "learning_rate": 2.0033999999999998e-05, "loss": 0.1127, "step": 6681 }, { "epoch": 37.540845070422534, "grad_norm": 0.6187050938606262, "learning_rate": 2.0037e-05, "loss": 0.0974, "step": 6682 }, { "epoch": 37.54647887323944, "grad_norm": 0.7966198921203613, "learning_rate": 2.004e-05, "loss": 0.0397, "step": 6683 }, { "epoch": 37.55211267605634, "grad_norm": 0.7675522565841675, "learning_rate": 2.0043e-05, "loss": 0.0561, "step": 6684 }, { "epoch": 37.55774647887324, "grad_norm": 0.5843971371650696, "learning_rate": 2.0046e-05, "loss": 0.0679, "step": 6685 }, { "epoch": 37.563380281690144, "grad_norm": 0.43494588136672974, "learning_rate": 2.0049e-05, "loss": 0.0331, "step": 6686 }, { "epoch": 37.56901408450704, "grad_norm": 0.37497416138648987, "learning_rate": 2.0052e-05, "loss": 0.0306, "step": 6687 }, { "epoch": 37.57464788732394, "grad_norm": 0.7802902460098267, "learning_rate": 2.0055e-05, "loss": 0.045, "step": 6688 }, { "epoch": 37.58028169014084, "grad_norm": 0.5478717684745789, "learning_rate": 2.0058e-05, "loss": 0.0549, "step": 6689 }, { "epoch": 37.585915492957746, "grad_norm": 0.48405370116233826, "learning_rate": 2.0061e-05, "loss": 0.0258, "step": 6690 }, { "epoch": 37.59154929577465, "grad_norm": 0.5015719532966614, "learning_rate": 2.0064e-05, "loss": 0.0463, "step": 6691 }, { "epoch": 37.59718309859155, "grad_norm": 0.7796661853790283, "learning_rate": 2.0067000000000003e-05, "loss": 0.0552, "step": 6692 }, { "epoch": 37.60281690140845, "grad_norm": 0.44841083884239197, "learning_rate": 2.0070000000000003e-05, "loss": 0.0233, "step": 6693 }, { "epoch": 37.608450704225355, "grad_norm": 0.6160410642623901, "learning_rate": 2.0073000000000002e-05, "loss": 0.0282, "step": 6694 }, { "epoch": 37.61408450704225, "grad_norm": 0.400128573179245, "learning_rate": 
2.0076000000000002e-05, "loss": 0.0221, "step": 6695 }, { "epoch": 37.61971830985915, "grad_norm": 0.5070633292198181, "learning_rate": 2.0079000000000002e-05, "loss": 0.0185, "step": 6696 }, { "epoch": 37.625352112676055, "grad_norm": 0.6032521724700928, "learning_rate": 2.0082000000000002e-05, "loss": 0.0356, "step": 6697 }, { "epoch": 37.63098591549296, "grad_norm": 0.579928994178772, "learning_rate": 2.0085e-05, "loss": 0.0201, "step": 6698 }, { "epoch": 37.63661971830986, "grad_norm": 0.4717251658439636, "learning_rate": 2.0087999999999998e-05, "loss": 0.0292, "step": 6699 }, { "epoch": 37.64225352112676, "grad_norm": 0.4430631101131439, "learning_rate": 2.0090999999999998e-05, "loss": 0.0238, "step": 6700 }, { "epoch": 37.647887323943664, "grad_norm": 0.54737788438797, "learning_rate": 2.0093999999999998e-05, "loss": 0.0189, "step": 6701 }, { "epoch": 37.653521126760566, "grad_norm": 0.476429283618927, "learning_rate": 2.0097e-05, "loss": 0.0094, "step": 6702 }, { "epoch": 37.65915492957747, "grad_norm": 0.3925788402557373, "learning_rate": 2.01e-05, "loss": 0.0129, "step": 6703 }, { "epoch": 37.66478873239436, "grad_norm": 0.5342569947242737, "learning_rate": 2.0103e-05, "loss": 0.0381, "step": 6704 }, { "epoch": 37.670422535211266, "grad_norm": 0.5101678371429443, "learning_rate": 2.0106e-05, "loss": 0.0098, "step": 6705 }, { "epoch": 37.67605633802817, "grad_norm": 0.7376613020896912, "learning_rate": 2.0109e-05, "loss": 0.0292, "step": 6706 }, { "epoch": 37.68169014084507, "grad_norm": 0.7361453771591187, "learning_rate": 2.0112e-05, "loss": 0.0244, "step": 6707 }, { "epoch": 37.68732394366197, "grad_norm": 0.4697571098804474, "learning_rate": 2.0115e-05, "loss": 0.0675, "step": 6708 }, { "epoch": 37.692957746478875, "grad_norm": 0.8032397031784058, "learning_rate": 2.0118e-05, "loss": 0.0309, "step": 6709 }, { "epoch": 37.69859154929578, "grad_norm": 1.1545032262802124, "learning_rate": 2.0121e-05, "loss": 0.0229, "step": 6710 }, { "epoch": 
37.70422535211267, "grad_norm": 0.5821328163146973, "learning_rate": 2.0124e-05, "loss": 0.04, "step": 6711 }, { "epoch": 37.709859154929575, "grad_norm": 0.6135793328285217, "learning_rate": 2.0127000000000002e-05, "loss": 0.0161, "step": 6712 }, { "epoch": 37.71549295774648, "grad_norm": 1.788836121559143, "learning_rate": 2.0130000000000002e-05, "loss": 0.038, "step": 6713 }, { "epoch": 37.72112676056338, "grad_norm": 1.1431268453598022, "learning_rate": 2.0133000000000002e-05, "loss": 0.0136, "step": 6714 }, { "epoch": 37.72676056338028, "grad_norm": 0.780083179473877, "learning_rate": 2.0136e-05, "loss": 0.0277, "step": 6715 }, { "epoch": 37.732394366197184, "grad_norm": 0.6091696619987488, "learning_rate": 2.0139e-05, "loss": 0.0181, "step": 6716 }, { "epoch": 37.738028169014086, "grad_norm": 0.48903611302375793, "learning_rate": 2.0142e-05, "loss": 0.0185, "step": 6717 }, { "epoch": 37.74366197183099, "grad_norm": 0.6945245265960693, "learning_rate": 2.0145e-05, "loss": 0.0077, "step": 6718 }, { "epoch": 37.74929577464789, "grad_norm": 1.206674337387085, "learning_rate": 2.0148e-05, "loss": 0.1585, "step": 6719 }, { "epoch": 37.754929577464786, "grad_norm": 0.5300062894821167, "learning_rate": 2.0151e-05, "loss": 0.0842, "step": 6720 }, { "epoch": 37.76056338028169, "grad_norm": 0.5945276021957397, "learning_rate": 2.0154e-05, "loss": 0.0952, "step": 6721 }, { "epoch": 37.76619718309859, "grad_norm": 0.5959362983703613, "learning_rate": 2.0157000000000004e-05, "loss": 0.0753, "step": 6722 }, { "epoch": 37.77183098591549, "grad_norm": 0.7394816875457764, "learning_rate": 2.016e-05, "loss": 0.1022, "step": 6723 }, { "epoch": 37.777464788732395, "grad_norm": 0.6031231880187988, "learning_rate": 2.0163e-05, "loss": 0.0743, "step": 6724 }, { "epoch": 37.7830985915493, "grad_norm": 0.5409649610519409, "learning_rate": 2.0166e-05, "loss": 0.0532, "step": 6725 }, { "epoch": 37.7887323943662, "grad_norm": 0.48864489793777466, "learning_rate": 2.0169e-05, "loss": 
0.0541, "step": 6726 }, { "epoch": 37.7943661971831, "grad_norm": 0.9869182705879211, "learning_rate": 2.0172e-05, "loss": 0.0971, "step": 6727 }, { "epoch": 37.8, "grad_norm": 0.6580134630203247, "learning_rate": 2.0175e-05, "loss": 0.0564, "step": 6728 }, { "epoch": 37.8056338028169, "grad_norm": 0.5693881511688232, "learning_rate": 2.0178e-05, "loss": 0.0383, "step": 6729 }, { "epoch": 37.8112676056338, "grad_norm": 0.49909907579421997, "learning_rate": 2.0181e-05, "loss": 0.0427, "step": 6730 }, { "epoch": 37.816901408450704, "grad_norm": 0.5998145341873169, "learning_rate": 2.0184e-05, "loss": 0.0876, "step": 6731 }, { "epoch": 37.822535211267606, "grad_norm": 0.5604900121688843, "learning_rate": 2.0187000000000002e-05, "loss": 0.0832, "step": 6732 }, { "epoch": 37.82816901408451, "grad_norm": 0.5075482726097107, "learning_rate": 2.019e-05, "loss": 0.0445, "step": 6733 }, { "epoch": 37.83380281690141, "grad_norm": 0.691598117351532, "learning_rate": 2.0193e-05, "loss": 0.101, "step": 6734 }, { "epoch": 37.83943661971831, "grad_norm": 0.5945582985877991, "learning_rate": 2.0196e-05, "loss": 0.0396, "step": 6735 }, { "epoch": 37.84507042253521, "grad_norm": 0.4628247618675232, "learning_rate": 2.0199e-05, "loss": 0.0295, "step": 6736 }, { "epoch": 37.85070422535211, "grad_norm": 0.47769695520401, "learning_rate": 2.0202e-05, "loss": 0.0262, "step": 6737 }, { "epoch": 37.85633802816901, "grad_norm": 0.42791324853897095, "learning_rate": 2.0205e-05, "loss": 0.0212, "step": 6738 }, { "epoch": 37.861971830985915, "grad_norm": 0.6965246796607971, "learning_rate": 2.0208e-05, "loss": 0.0534, "step": 6739 }, { "epoch": 37.86760563380282, "grad_norm": 0.5003166198730469, "learning_rate": 2.0211e-05, "loss": 0.0204, "step": 6740 }, { "epoch": 37.87323943661972, "grad_norm": 1.104461908340454, "learning_rate": 2.0214e-05, "loss": 0.0557, "step": 6741 }, { "epoch": 37.87887323943662, "grad_norm": 0.7004292607307434, "learning_rate": 2.0217000000000003e-05, "loss": 0.0705, 
"step": 6742 }, { "epoch": 37.884507042253524, "grad_norm": 0.5784651637077332, "learning_rate": 2.0220000000000003e-05, "loss": 0.0478, "step": 6743 }, { "epoch": 37.89014084507042, "grad_norm": 0.8215164542198181, "learning_rate": 2.0223000000000003e-05, "loss": 0.0342, "step": 6744 }, { "epoch": 37.89577464788732, "grad_norm": 0.4326547384262085, "learning_rate": 2.0226000000000003e-05, "loss": 0.0246, "step": 6745 }, { "epoch": 37.901408450704224, "grad_norm": 0.43848761916160583, "learning_rate": 2.0229000000000002e-05, "loss": 0.0265, "step": 6746 }, { "epoch": 37.907042253521126, "grad_norm": 0.3843231499195099, "learning_rate": 2.0232000000000002e-05, "loss": 0.0133, "step": 6747 }, { "epoch": 37.91267605633803, "grad_norm": 0.43572133779525757, "learning_rate": 2.0235e-05, "loss": 0.0245, "step": 6748 }, { "epoch": 37.91830985915493, "grad_norm": 0.4336712658405304, "learning_rate": 2.0238e-05, "loss": 0.022, "step": 6749 }, { "epoch": 37.92394366197183, "grad_norm": 0.8156368732452393, "learning_rate": 2.0240999999999998e-05, "loss": 0.0262, "step": 6750 }, { "epoch": 37.929577464788736, "grad_norm": 0.45341411232948303, "learning_rate": 2.0243999999999998e-05, "loss": 0.0529, "step": 6751 }, { "epoch": 37.93521126760563, "grad_norm": 0.5952041745185852, "learning_rate": 2.0247e-05, "loss": 0.0375, "step": 6752 }, { "epoch": 37.94084507042253, "grad_norm": 0.6855674982070923, "learning_rate": 2.025e-05, "loss": 0.0162, "step": 6753 }, { "epoch": 37.946478873239435, "grad_norm": 0.6609663367271423, "learning_rate": 2.0253e-05, "loss": 0.0237, "step": 6754 }, { "epoch": 37.95211267605634, "grad_norm": 0.665765643119812, "learning_rate": 2.0256e-05, "loss": 0.0368, "step": 6755 }, { "epoch": 37.95774647887324, "grad_norm": 0.6505454778671265, "learning_rate": 2.0259e-05, "loss": 0.0135, "step": 6756 }, { "epoch": 37.96338028169014, "grad_norm": 0.288127601146698, "learning_rate": 2.0262e-05, "loss": 0.0118, "step": 6757 }, { "epoch": 37.969014084507045, 
"grad_norm": 0.7454851269721985, "learning_rate": 2.0265e-05, "loss": 0.0902, "step": 6758 }, { "epoch": 37.97464788732395, "grad_norm": 0.4316297173500061, "learning_rate": 2.0268e-05, "loss": 0.0156, "step": 6759 }, { "epoch": 37.98028169014084, "grad_norm": 0.8381431102752686, "learning_rate": 2.0271e-05, "loss": 0.0657, "step": 6760 }, { "epoch": 37.985915492957744, "grad_norm": 0.48815783858299255, "learning_rate": 2.0274e-05, "loss": 0.0333, "step": 6761 }, { "epoch": 37.99154929577465, "grad_norm": 0.42649728059768677, "learning_rate": 2.0277e-05, "loss": 0.0075, "step": 6762 }, { "epoch": 37.99718309859155, "grad_norm": 0.5080236196517944, "learning_rate": 2.0280000000000002e-05, "loss": 0.0471, "step": 6763 }, { "epoch": 38.0, "grad_norm": 0.5568985939025879, "learning_rate": 2.0283000000000002e-05, "loss": 0.0071, "step": 6764 }, { "epoch": 38.0056338028169, "grad_norm": 0.6751949191093445, "learning_rate": 2.0286000000000002e-05, "loss": 0.0861, "step": 6765 }, { "epoch": 38.011267605633805, "grad_norm": 0.5531784892082214, "learning_rate": 2.0289000000000002e-05, "loss": 0.0656, "step": 6766 }, { "epoch": 38.01690140845071, "grad_norm": 0.5917172431945801, "learning_rate": 2.0292e-05, "loss": 0.0808, "step": 6767 }, { "epoch": 38.02253521126761, "grad_norm": 0.8637703061103821, "learning_rate": 2.0295e-05, "loss": 0.0949, "step": 6768 }, { "epoch": 38.028169014084504, "grad_norm": 0.48848146200180054, "learning_rate": 2.0298e-05, "loss": 0.0375, "step": 6769 }, { "epoch": 38.03380281690141, "grad_norm": 0.4809871315956116, "learning_rate": 2.0301e-05, "loss": 0.0652, "step": 6770 }, { "epoch": 38.03943661971831, "grad_norm": 0.4351126253604889, "learning_rate": 2.0304e-05, "loss": 0.0683, "step": 6771 }, { "epoch": 38.04507042253521, "grad_norm": 0.7097147107124329, "learning_rate": 2.0307e-05, "loss": 0.0675, "step": 6772 }, { "epoch": 38.05070422535211, "grad_norm": 0.7285991311073303, "learning_rate": 2.031e-05, "loss": 0.0306, "step": 6773 }, { 
"epoch": 38.056338028169016, "grad_norm": 0.5019846558570862, "learning_rate": 2.0313e-05, "loss": 0.0517, "step": 6774 }, { "epoch": 38.06197183098592, "grad_norm": 0.5821499228477478, "learning_rate": 2.0316e-05, "loss": 0.0573, "step": 6775 }, { "epoch": 38.06760563380282, "grad_norm": 0.530942976474762, "learning_rate": 2.0319e-05, "loss": 0.0435, "step": 6776 }, { "epoch": 38.073239436619716, "grad_norm": 0.5353413224220276, "learning_rate": 2.0322e-05, "loss": 0.0883, "step": 6777 }, { "epoch": 38.07887323943662, "grad_norm": 0.4072878956794739, "learning_rate": 2.0325e-05, "loss": 0.042, "step": 6778 }, { "epoch": 38.08450704225352, "grad_norm": 0.5510742664337158, "learning_rate": 2.0328e-05, "loss": 0.0972, "step": 6779 }, { "epoch": 38.09014084507042, "grad_norm": 0.4391019940376282, "learning_rate": 2.0331e-05, "loss": 0.0366, "step": 6780 }, { "epoch": 38.095774647887325, "grad_norm": 0.5606752038002014, "learning_rate": 2.0334e-05, "loss": 0.0233, "step": 6781 }, { "epoch": 38.10140845070423, "grad_norm": 0.43519923090934753, "learning_rate": 2.0337e-05, "loss": 0.039, "step": 6782 }, { "epoch": 38.10704225352113, "grad_norm": 0.40706655383110046, "learning_rate": 2.0340000000000002e-05, "loss": 0.0274, "step": 6783 }, { "epoch": 38.11267605633803, "grad_norm": 0.2745266556739807, "learning_rate": 2.0343e-05, "loss": 0.0164, "step": 6784 }, { "epoch": 38.11830985915493, "grad_norm": 0.42930394411087036, "learning_rate": 2.0346e-05, "loss": 0.0181, "step": 6785 }, { "epoch": 38.12394366197183, "grad_norm": 0.5081039071083069, "learning_rate": 2.0349e-05, "loss": 0.0339, "step": 6786 }, { "epoch": 38.12957746478873, "grad_norm": 0.5804871916770935, "learning_rate": 2.0352e-05, "loss": 0.0282, "step": 6787 }, { "epoch": 38.135211267605634, "grad_norm": 0.3561040461063385, "learning_rate": 2.0355e-05, "loss": 0.0372, "step": 6788 }, { "epoch": 38.140845070422536, "grad_norm": 0.570104718208313, "learning_rate": 2.0358e-05, "loss": 0.0155, "step": 6789 }, { 
"epoch": 38.14647887323944, "grad_norm": 0.6399855613708496, "learning_rate": 2.0361e-05, "loss": 0.0319, "step": 6790 }, { "epoch": 38.15211267605634, "grad_norm": 0.3834407925605774, "learning_rate": 2.0364e-05, "loss": 0.0154, "step": 6791 }, { "epoch": 38.15774647887324, "grad_norm": 0.4630981385707855, "learning_rate": 2.0367e-05, "loss": 0.0158, "step": 6792 }, { "epoch": 38.16338028169014, "grad_norm": 0.3661392033100128, "learning_rate": 2.0370000000000003e-05, "loss": 0.0165, "step": 6793 }, { "epoch": 38.16901408450704, "grad_norm": 0.37484854459762573, "learning_rate": 2.0373000000000003e-05, "loss": 0.0492, "step": 6794 }, { "epoch": 38.17464788732394, "grad_norm": 0.4835563898086548, "learning_rate": 2.0376000000000003e-05, "loss": 0.0283, "step": 6795 }, { "epoch": 38.180281690140845, "grad_norm": 0.4565122723579407, "learning_rate": 2.0379000000000003e-05, "loss": 0.0179, "step": 6796 }, { "epoch": 38.18591549295775, "grad_norm": 0.7157280445098877, "learning_rate": 2.0382e-05, "loss": 0.0782, "step": 6797 }, { "epoch": 38.19154929577465, "grad_norm": 0.5994229316711426, "learning_rate": 2.0385e-05, "loss": 0.0593, "step": 6798 }, { "epoch": 38.19718309859155, "grad_norm": 0.6242246627807617, "learning_rate": 2.0388e-05, "loss": 0.0205, "step": 6799 }, { "epoch": 38.202816901408454, "grad_norm": 0.46142521500587463, "learning_rate": 2.0391e-05, "loss": 0.034, "step": 6800 }, { "epoch": 38.20845070422535, "grad_norm": 0.35941600799560547, "learning_rate": 2.0393999999999998e-05, "loss": 0.0188, "step": 6801 }, { "epoch": 38.21408450704225, "grad_norm": 0.6080695390701294, "learning_rate": 2.0396999999999998e-05, "loss": 0.0345, "step": 6802 }, { "epoch": 38.219718309859154, "grad_norm": 0.4642675220966339, "learning_rate": 2.04e-05, "loss": 0.0193, "step": 6803 }, { "epoch": 38.225352112676056, "grad_norm": 0.7943758368492126, "learning_rate": 2.0403e-05, "loss": 0.0101, "step": 6804 }, { "epoch": 38.23098591549296, "grad_norm": 0.47398918867111206, 
"learning_rate": 2.0406e-05, "loss": 0.0106, "step": 6805 }, { "epoch": 38.23661971830986, "grad_norm": 1.8236685991287231, "learning_rate": 2.0409e-05, "loss": 0.0204, "step": 6806 }, { "epoch": 38.24225352112676, "grad_norm": 0.43615037202835083, "learning_rate": 2.0412e-05, "loss": 0.026, "step": 6807 }, { "epoch": 38.247887323943665, "grad_norm": 0.5097202062606812, "learning_rate": 2.0415e-05, "loss": 0.0159, "step": 6808 }, { "epoch": 38.25352112676056, "grad_norm": 0.7029985785484314, "learning_rate": 2.0418e-05, "loss": 0.1662, "step": 6809 }, { "epoch": 38.25915492957746, "grad_norm": 17.49290657043457, "learning_rate": 2.0421e-05, "loss": 0.0924, "step": 6810 }, { "epoch": 38.264788732394365, "grad_norm": 1.410933256149292, "learning_rate": 2.0424e-05, "loss": 0.0767, "step": 6811 }, { "epoch": 38.27042253521127, "grad_norm": 0.7073354125022888, "learning_rate": 2.0427e-05, "loss": 0.0796, "step": 6812 }, { "epoch": 38.27605633802817, "grad_norm": 0.41530755162239075, "learning_rate": 2.0430000000000003e-05, "loss": 0.0606, "step": 6813 }, { "epoch": 38.28169014084507, "grad_norm": 0.5682529807090759, "learning_rate": 2.0433000000000002e-05, "loss": 0.0581, "step": 6814 }, { "epoch": 38.287323943661974, "grad_norm": 0.5812345147132874, "learning_rate": 2.0436000000000002e-05, "loss": 0.0902, "step": 6815 }, { "epoch": 38.292957746478876, "grad_norm": 0.617973804473877, "learning_rate": 2.0439000000000002e-05, "loss": 0.0558, "step": 6816 }, { "epoch": 38.29859154929577, "grad_norm": 0.42676761746406555, "learning_rate": 2.0442000000000002e-05, "loss": 0.0371, "step": 6817 }, { "epoch": 38.304225352112674, "grad_norm": 0.6036847233772278, "learning_rate": 2.0445e-05, "loss": 0.055, "step": 6818 }, { "epoch": 38.309859154929576, "grad_norm": 0.5863431096076965, "learning_rate": 2.0448e-05, "loss": 0.0664, "step": 6819 }, { "epoch": 38.31549295774648, "grad_norm": 0.8448296189308167, "learning_rate": 2.0451e-05, "loss": 0.0385, "step": 6820 }, { "epoch": 
38.32112676056338, "grad_norm": 0.5159189105033875, "learning_rate": 2.0454e-05, "loss": 0.0276, "step": 6821 }, { "epoch": 38.32676056338028, "grad_norm": 0.6634547710418701, "learning_rate": 2.0456999999999997e-05, "loss": 0.0488, "step": 6822 }, { "epoch": 38.332394366197185, "grad_norm": 0.48076769709587097, "learning_rate": 2.046e-05, "loss": 0.0402, "step": 6823 }, { "epoch": 38.33802816901409, "grad_norm": 0.46188679337501526, "learning_rate": 2.0463e-05, "loss": 0.0286, "step": 6824 }, { "epoch": 38.34366197183098, "grad_norm": 0.43719932436943054, "learning_rate": 2.0466e-05, "loss": 0.0399, "step": 6825 }, { "epoch": 38.349295774647885, "grad_norm": 0.7780287265777588, "learning_rate": 2.0469e-05, "loss": 0.0295, "step": 6826 }, { "epoch": 38.35492957746479, "grad_norm": 0.6702249646186829, "learning_rate": 2.0472e-05, "loss": 0.0367, "step": 6827 }, { "epoch": 38.36056338028169, "grad_norm": 0.5512853264808655, "learning_rate": 2.0475e-05, "loss": 0.0391, "step": 6828 }, { "epoch": 38.36619718309859, "grad_norm": 0.4514889717102051, "learning_rate": 2.0478e-05, "loss": 0.0192, "step": 6829 }, { "epoch": 38.371830985915494, "grad_norm": 0.33734843134880066, "learning_rate": 2.0481e-05, "loss": 0.0156, "step": 6830 }, { "epoch": 38.3774647887324, "grad_norm": 1.1632055044174194, "learning_rate": 2.0484e-05, "loss": 0.0346, "step": 6831 }, { "epoch": 38.3830985915493, "grad_norm": 1.1631197929382324, "learning_rate": 2.0487e-05, "loss": 0.018, "step": 6832 }, { "epoch": 38.388732394366194, "grad_norm": 0.3285568654537201, "learning_rate": 2.0490000000000002e-05, "loss": 0.0362, "step": 6833 }, { "epoch": 38.394366197183096, "grad_norm": 0.49475109577178955, "learning_rate": 2.0493000000000002e-05, "loss": 0.0227, "step": 6834 }, { "epoch": 38.4, "grad_norm": 0.6912474632263184, "learning_rate": 2.0496e-05, "loss": 0.0676, "step": 6835 }, { "epoch": 38.4056338028169, "grad_norm": 2.9253146648406982, "learning_rate": 2.0499e-05, "loss": 0.0135, "step": 6836 
}, { "epoch": 38.4112676056338, "grad_norm": 0.970075249671936, "learning_rate": 2.0502e-05, "loss": 0.0406, "step": 6837 }, { "epoch": 38.416901408450705, "grad_norm": 1.1126623153686523, "learning_rate": 2.0505e-05, "loss": 0.0286, "step": 6838 }, { "epoch": 38.42253521126761, "grad_norm": 0.3493662178516388, "learning_rate": 2.0508e-05, "loss": 0.0134, "step": 6839 }, { "epoch": 38.42816901408451, "grad_norm": 0.6518783569335938, "learning_rate": 2.0511e-05, "loss": 0.0095, "step": 6840 }, { "epoch": 38.433802816901405, "grad_norm": 0.9401983618736267, "learning_rate": 2.0514e-05, "loss": 0.0526, "step": 6841 }, { "epoch": 38.43943661971831, "grad_norm": 1.3513484001159668, "learning_rate": 2.0517e-05, "loss": 0.0495, "step": 6842 }, { "epoch": 38.44507042253521, "grad_norm": 0.7823395729064941, "learning_rate": 2.0520000000000003e-05, "loss": 0.0236, "step": 6843 }, { "epoch": 38.45070422535211, "grad_norm": 0.5789241194725037, "learning_rate": 2.0523000000000003e-05, "loss": 0.0166, "step": 6844 }, { "epoch": 38.456338028169014, "grad_norm": 1.6388907432556152, "learning_rate": 2.0526000000000003e-05, "loss": 0.036, "step": 6845 }, { "epoch": 38.46197183098592, "grad_norm": 0.6615618467330933, "learning_rate": 2.0529e-05, "loss": 0.0332, "step": 6846 }, { "epoch": 38.46760563380282, "grad_norm": 0.733688473701477, "learning_rate": 2.0532e-05, "loss": 0.0306, "step": 6847 }, { "epoch": 38.47323943661972, "grad_norm": 0.45233461260795593, "learning_rate": 2.0535e-05, "loss": 0.0118, "step": 6848 }, { "epoch": 38.478873239436616, "grad_norm": 0.4297047257423401, "learning_rate": 2.0538e-05, "loss": 0.0098, "step": 6849 }, { "epoch": 38.48450704225352, "grad_norm": 2.32468581199646, "learning_rate": 2.0541e-05, "loss": 0.1076, "step": 6850 }, { "epoch": 38.49014084507042, "grad_norm": 0.48863106966018677, "learning_rate": 2.0544e-05, "loss": 0.0345, "step": 6851 }, { "epoch": 38.49577464788732, "grad_norm": 1.0799311399459839, "learning_rate": 
2.0546999999999998e-05, "loss": 0.0295, "step": 6852 }, { "epoch": 38.501408450704226, "grad_norm": 2.9735348224639893, "learning_rate": 2.055e-05, "loss": 0.1909, "step": 6853 }, { "epoch": 38.50704225352113, "grad_norm": 3.904378652572632, "learning_rate": 2.0553e-05, "loss": 0.1645, "step": 6854 }, { "epoch": 38.51267605633803, "grad_norm": 2.3798930644989014, "learning_rate": 2.0556e-05, "loss": 0.0886, "step": 6855 }, { "epoch": 38.51830985915493, "grad_norm": 0.5291370153427124, "learning_rate": 2.0559e-05, "loss": 0.079, "step": 6856 }, { "epoch": 38.52394366197183, "grad_norm": 1.0964303016662598, "learning_rate": 2.0562e-05, "loss": 0.1017, "step": 6857 }, { "epoch": 38.52957746478873, "grad_norm": 1.401548147201538, "learning_rate": 2.0565e-05, "loss": 0.091, "step": 6858 }, { "epoch": 38.53521126760563, "grad_norm": 0.5544741749763489, "learning_rate": 2.0568e-05, "loss": 0.0816, "step": 6859 }, { "epoch": 38.540845070422534, "grad_norm": 0.7993527054786682, "learning_rate": 2.0571e-05, "loss": 0.1238, "step": 6860 }, { "epoch": 38.54647887323944, "grad_norm": 3.9132158756256104, "learning_rate": 2.0574e-05, "loss": 0.2617, "step": 6861 }, { "epoch": 38.55211267605634, "grad_norm": 1.1395249366760254, "learning_rate": 2.0577e-05, "loss": 0.109, "step": 6862 }, { "epoch": 38.55774647887324, "grad_norm": 2.091203212738037, "learning_rate": 2.0580000000000003e-05, "loss": 0.0771, "step": 6863 }, { "epoch": 38.563380281690144, "grad_norm": 0.6181694269180298, "learning_rate": 2.0583000000000003e-05, "loss": 0.0479, "step": 6864 }, { "epoch": 38.56901408450704, "grad_norm": 0.7932649254798889, "learning_rate": 2.0586000000000002e-05, "loss": 0.1079, "step": 6865 }, { "epoch": 38.57464788732394, "grad_norm": 0.7280565500259399, "learning_rate": 2.0589000000000002e-05, "loss": 0.0836, "step": 6866 }, { "epoch": 38.58028169014084, "grad_norm": 0.6084385514259338, "learning_rate": 2.0592000000000002e-05, "loss": 0.0312, "step": 6867 }, { "epoch": 
38.585915492957746, "grad_norm": 0.5952422022819519, "learning_rate": 2.0595000000000002e-05, "loss": 0.0412, "step": 6868 }, { "epoch": 38.59154929577465, "grad_norm": 0.7939813733100891, "learning_rate": 2.0598e-05, "loss": 0.0428, "step": 6869 }, { "epoch": 38.59718309859155, "grad_norm": 0.6962659358978271, "learning_rate": 2.0601e-05, "loss": 0.0548, "step": 6870 }, { "epoch": 38.60281690140845, "grad_norm": 0.45800483226776123, "learning_rate": 2.0603999999999998e-05, "loss": 0.0353, "step": 6871 }, { "epoch": 38.608450704225355, "grad_norm": 0.8061509132385254, "learning_rate": 2.0606999999999998e-05, "loss": 0.0433, "step": 6872 }, { "epoch": 38.61408450704225, "grad_norm": 0.45862463116645813, "learning_rate": 2.061e-05, "loss": 0.0364, "step": 6873 }, { "epoch": 38.61971830985915, "grad_norm": 0.4486182928085327, "learning_rate": 2.0613e-05, "loss": 0.0317, "step": 6874 }, { "epoch": 38.625352112676055, "grad_norm": 0.8446391224861145, "learning_rate": 2.0616e-05, "loss": 0.0391, "step": 6875 }, { "epoch": 38.63098591549296, "grad_norm": 0.41973263025283813, "learning_rate": 2.0619e-05, "loss": 0.0159, "step": 6876 }, { "epoch": 38.63661971830986, "grad_norm": 0.7292184829711914, "learning_rate": 2.0622e-05, "loss": 0.0505, "step": 6877 }, { "epoch": 38.64225352112676, "grad_norm": 0.6393370628356934, "learning_rate": 2.0625e-05, "loss": 0.0277, "step": 6878 }, { "epoch": 38.647887323943664, "grad_norm": 0.39132627844810486, "learning_rate": 2.0628e-05, "loss": 0.0204, "step": 6879 }, { "epoch": 38.653521126760566, "grad_norm": 1.2014517784118652, "learning_rate": 2.0631e-05, "loss": 0.046, "step": 6880 }, { "epoch": 38.65915492957747, "grad_norm": 0.4479469656944275, "learning_rate": 2.0634e-05, "loss": 0.0144, "step": 6881 }, { "epoch": 38.66478873239436, "grad_norm": 0.3557812571525574, "learning_rate": 2.0637e-05, "loss": 0.0191, "step": 6882 }, { "epoch": 38.670422535211266, "grad_norm": 0.647489607334137, "learning_rate": 2.064e-05, "loss": 0.0156, 
"step": 6883 }, { "epoch": 38.67605633802817, "grad_norm": 0.36688968539237976, "learning_rate": 2.0643000000000002e-05, "loss": 0.0405, "step": 6884 }, { "epoch": 38.68169014084507, "grad_norm": 0.5117655396461487, "learning_rate": 2.0646000000000002e-05, "loss": 0.0497, "step": 6885 }, { "epoch": 38.68732394366197, "grad_norm": 0.5694383382797241, "learning_rate": 2.0649e-05, "loss": 0.0174, "step": 6886 }, { "epoch": 38.692957746478875, "grad_norm": 0.4169192612171173, "learning_rate": 2.0652e-05, "loss": 0.0346, "step": 6887 }, { "epoch": 38.69859154929578, "grad_norm": 0.6681201457977295, "learning_rate": 2.0655e-05, "loss": 0.0222, "step": 6888 }, { "epoch": 38.70422535211267, "grad_norm": 1.1668353080749512, "learning_rate": 2.0658e-05, "loss": 0.0177, "step": 6889 }, { "epoch": 38.709859154929575, "grad_norm": 0.470647931098938, "learning_rate": 2.0661e-05, "loss": 0.0257, "step": 6890 }, { "epoch": 38.71549295774648, "grad_norm": 0.431679368019104, "learning_rate": 2.0664e-05, "loss": 0.0102, "step": 6891 }, { "epoch": 38.72112676056338, "grad_norm": 1.086158037185669, "learning_rate": 2.0667e-05, "loss": 0.0588, "step": 6892 }, { "epoch": 38.72676056338028, "grad_norm": 0.33252549171447754, "learning_rate": 2.067e-05, "loss": 0.009, "step": 6893 }, { "epoch": 38.732394366197184, "grad_norm": 0.865179181098938, "learning_rate": 2.0673000000000003e-05, "loss": 0.0437, "step": 6894 }, { "epoch": 38.738028169014086, "grad_norm": 0.5797864198684692, "learning_rate": 2.0676e-05, "loss": 0.0212, "step": 6895 }, { "epoch": 38.74366197183099, "grad_norm": 0.3475152850151062, "learning_rate": 2.0679e-05, "loss": 0.0118, "step": 6896 }, { "epoch": 38.74929577464789, "grad_norm": 1.0185235738754272, "learning_rate": 2.0682e-05, "loss": 0.1335, "step": 6897 }, { "epoch": 38.754929577464786, "grad_norm": 0.6892296671867371, "learning_rate": 2.0685e-05, "loss": 0.0849, "step": 6898 }, { "epoch": 38.76056338028169, "grad_norm": 0.6174803376197815, "learning_rate": 
2.0688e-05, "loss": 0.0922, "step": 6899 }, { "epoch": 38.76619718309859, "grad_norm": 0.5313282608985901, "learning_rate": 2.0691e-05, "loss": 0.0707, "step": 6900 }, { "epoch": 38.77183098591549, "grad_norm": 0.8400399088859558, "learning_rate": 2.0694e-05, "loss": 0.136, "step": 6901 }, { "epoch": 38.777464788732395, "grad_norm": 0.8214924931526184, "learning_rate": 2.0697e-05, "loss": 0.1158, "step": 6902 }, { "epoch": 38.7830985915493, "grad_norm": 0.5002476572990417, "learning_rate": 2.07e-05, "loss": 0.0533, "step": 6903 }, { "epoch": 38.7887323943662, "grad_norm": 0.6017563343048096, "learning_rate": 2.0703e-05, "loss": 0.0526, "step": 6904 }, { "epoch": 38.7943661971831, "grad_norm": 0.6084911227226257, "learning_rate": 2.0706e-05, "loss": 0.0625, "step": 6905 }, { "epoch": 38.8, "grad_norm": 0.5878543853759766, "learning_rate": 2.0709e-05, "loss": 0.0729, "step": 6906 }, { "epoch": 38.8056338028169, "grad_norm": 0.7131029367446899, "learning_rate": 2.0712e-05, "loss": 0.0533, "step": 6907 }, { "epoch": 38.8112676056338, "grad_norm": 0.545417308807373, "learning_rate": 2.0715e-05, "loss": 0.0432, "step": 6908 }, { "epoch": 38.816901408450704, "grad_norm": 0.4532319903373718, "learning_rate": 2.0718e-05, "loss": 0.0324, "step": 6909 }, { "epoch": 38.822535211267606, "grad_norm": 0.4136486351490021, "learning_rate": 2.0721e-05, "loss": 0.0341, "step": 6910 }, { "epoch": 38.82816901408451, "grad_norm": 0.4366842806339264, "learning_rate": 2.0724e-05, "loss": 0.0322, "step": 6911 }, { "epoch": 38.83380281690141, "grad_norm": 0.7204238772392273, "learning_rate": 2.0727e-05, "loss": 0.0526, "step": 6912 }, { "epoch": 38.83943661971831, "grad_norm": 0.730279803276062, "learning_rate": 2.073e-05, "loss": 0.0702, "step": 6913 }, { "epoch": 38.84507042253521, "grad_norm": 0.4593850374221802, "learning_rate": 2.0733000000000003e-05, "loss": 0.0227, "step": 6914 }, { "epoch": 38.85070422535211, "grad_norm": 0.4842495918273926, "learning_rate": 2.0736000000000003e-05, 
"loss": 0.0271, "step": 6915 }, { "epoch": 38.85633802816901, "grad_norm": 1.7327898740768433, "learning_rate": 2.0739000000000003e-05, "loss": 0.0217, "step": 6916 }, { "epoch": 38.861971830985915, "grad_norm": 0.7462992072105408, "learning_rate": 2.0742000000000002e-05, "loss": 0.0761, "step": 6917 }, { "epoch": 38.86760563380282, "grad_norm": 0.48616015911102295, "learning_rate": 2.0745000000000002e-05, "loss": 0.0268, "step": 6918 }, { "epoch": 38.87323943661972, "grad_norm": 0.6256186962127686, "learning_rate": 2.0748000000000002e-05, "loss": 0.0706, "step": 6919 }, { "epoch": 38.87887323943662, "grad_norm": 0.4332086145877838, "learning_rate": 2.0751e-05, "loss": 0.0203, "step": 6920 }, { "epoch": 38.884507042253524, "grad_norm": 0.6780999898910522, "learning_rate": 2.0753999999999998e-05, "loss": 0.0301, "step": 6921 }, { "epoch": 38.89014084507042, "grad_norm": 0.4350431263446808, "learning_rate": 2.0756999999999998e-05, "loss": 0.0186, "step": 6922 }, { "epoch": 38.89577464788732, "grad_norm": 0.44273972511291504, "learning_rate": 2.0759999999999998e-05, "loss": 0.0174, "step": 6923 }, { "epoch": 38.901408450704224, "grad_norm": 0.4826180934906006, "learning_rate": 2.0763e-05, "loss": 0.016, "step": 6924 }, { "epoch": 38.907042253521126, "grad_norm": 0.5964898467063904, "learning_rate": 2.0766e-05, "loss": 0.0185, "step": 6925 }, { "epoch": 38.91267605633803, "grad_norm": 0.479593962430954, "learning_rate": 2.0769e-05, "loss": 0.0266, "step": 6926 }, { "epoch": 38.91830985915493, "grad_norm": 0.49397504329681396, "learning_rate": 2.0772e-05, "loss": 0.0349, "step": 6927 }, { "epoch": 38.92394366197183, "grad_norm": 0.29253605008125305, "learning_rate": 2.0775e-05, "loss": 0.0096, "step": 6928 }, { "epoch": 38.929577464788736, "grad_norm": 0.31014111638069153, "learning_rate": 2.0778e-05, "loss": 0.033, "step": 6929 }, { "epoch": 38.93521126760563, "grad_norm": 0.5397705435752869, "learning_rate": 2.0781e-05, "loss": 0.0358, "step": 6930 }, { "epoch": 
38.94084507042253, "grad_norm": 0.4145379066467285, "learning_rate": 2.0784e-05, "loss": 0.0103, "step": 6931 }, { "epoch": 38.946478873239435, "grad_norm": 0.5462891459465027, "learning_rate": 2.0787e-05, "loss": 0.0157, "step": 6932 }, { "epoch": 38.95211267605634, "grad_norm": 0.867237389087677, "learning_rate": 2.079e-05, "loss": 0.0473, "step": 6933 }, { "epoch": 38.95774647887324, "grad_norm": 0.6085178256034851, "learning_rate": 2.0793000000000002e-05, "loss": 0.0123, "step": 6934 }, { "epoch": 38.96338028169014, "grad_norm": 0.41174107789993286, "learning_rate": 2.0796000000000002e-05, "loss": 0.0256, "step": 6935 }, { "epoch": 38.969014084507045, "grad_norm": 0.4643528163433075, "learning_rate": 2.0799000000000002e-05, "loss": 0.074, "step": 6936 }, { "epoch": 38.97464788732395, "grad_norm": 0.4514641761779785, "learning_rate": 2.0802000000000002e-05, "loss": 0.009, "step": 6937 }, { "epoch": 38.98028169014084, "grad_norm": 0.7388227581977844, "learning_rate": 2.0805e-05, "loss": 0.0176, "step": 6938 }, { "epoch": 38.985915492957744, "grad_norm": 0.3626881241798401, "learning_rate": 2.0808e-05, "loss": 0.0075, "step": 6939 }, { "epoch": 38.99154929577465, "grad_norm": 1.8748831748962402, "learning_rate": 2.0811e-05, "loss": 0.0091, "step": 6940 }, { "epoch": 38.99718309859155, "grad_norm": 0.6597514748573303, "learning_rate": 2.0814e-05, "loss": 0.0472, "step": 6941 }, { "epoch": 39.0, "grad_norm": 0.30502569675445557, "learning_rate": 2.0817e-05, "loss": 0.0075, "step": 6942 }, { "epoch": 39.0056338028169, "grad_norm": 0.7066483497619629, "learning_rate": 2.082e-05, "loss": 0.103, "step": 6943 }, { "epoch": 39.011267605633805, "grad_norm": 0.585061252117157, "learning_rate": 2.0823e-05, "loss": 0.0699, "step": 6944 }, { "epoch": 39.01690140845071, "grad_norm": 0.49748870730400085, "learning_rate": 2.0826e-05, "loss": 0.0742, "step": 6945 }, { "epoch": 39.02253521126761, "grad_norm": 0.7853361964225769, "learning_rate": 2.0829e-05, "loss": 0.0969, "step": 
6946 }, { "epoch": 39.028169014084504, "grad_norm": 0.4722197353839874, "learning_rate": 2.0832e-05, "loss": 0.0726, "step": 6947 }, { "epoch": 39.03380281690141, "grad_norm": 0.6645857691764832, "learning_rate": 2.0835e-05, "loss": 0.0717, "step": 6948 }, { "epoch": 39.03943661971831, "grad_norm": 0.595655083656311, "learning_rate": 2.0838e-05, "loss": 0.0691, "step": 6949 }, { "epoch": 39.04507042253521, "grad_norm": 0.45026400685310364, "learning_rate": 2.0841e-05, "loss": 0.0522, "step": 6950 }, { "epoch": 39.05070422535211, "grad_norm": 0.4684717655181885, "learning_rate": 2.0844e-05, "loss": 0.0924, "step": 6951 }, { "epoch": 39.056338028169016, "grad_norm": 0.4373380243778229, "learning_rate": 2.0847e-05, "loss": 0.0336, "step": 6952 }, { "epoch": 39.06197183098592, "grad_norm": 0.43478575348854065, "learning_rate": 2.085e-05, "loss": 0.0533, "step": 6953 }, { "epoch": 39.06760563380282, "grad_norm": 0.3952817916870117, "learning_rate": 2.0853000000000002e-05, "loss": 0.0203, "step": 6954 }, { "epoch": 39.073239436619716, "grad_norm": 0.594340443611145, "learning_rate": 2.0856e-05, "loss": 0.0374, "step": 6955 }, { "epoch": 39.07887323943662, "grad_norm": 0.5485698580741882, "learning_rate": 2.0859e-05, "loss": 0.0977, "step": 6956 }, { "epoch": 39.08450704225352, "grad_norm": 0.47475704550743103, "learning_rate": 2.0862e-05, "loss": 0.0506, "step": 6957 }, { "epoch": 39.09014084507042, "grad_norm": 0.4719747006893158, "learning_rate": 2.0865e-05, "loss": 0.0614, "step": 6958 }, { "epoch": 39.095774647887325, "grad_norm": 0.6333062648773193, "learning_rate": 2.0868e-05, "loss": 0.028, "step": 6959 }, { "epoch": 39.10140845070423, "grad_norm": 0.4870012402534485, "learning_rate": 2.0871e-05, "loss": 0.022, "step": 6960 }, { "epoch": 39.10704225352113, "grad_norm": 0.7816788554191589, "learning_rate": 2.0874e-05, "loss": 0.0416, "step": 6961 }, { "epoch": 39.11267605633803, "grad_norm": 0.4535878598690033, "learning_rate": 2.0877e-05, "loss": 0.023, "step": 
6962 }, { "epoch": 39.11830985915493, "grad_norm": 0.5035465955734253, "learning_rate": 2.088e-05, "loss": 0.0481, "step": 6963 }, { "epoch": 39.12394366197183, "grad_norm": 0.4461061954498291, "learning_rate": 2.0883000000000003e-05, "loss": 0.0454, "step": 6964 }, { "epoch": 39.12957746478873, "grad_norm": 0.3623048961162567, "learning_rate": 2.0886000000000003e-05, "loss": 0.0264, "step": 6965 }, { "epoch": 39.135211267605634, "grad_norm": 0.4493294954299927, "learning_rate": 2.0889000000000003e-05, "loss": 0.0206, "step": 6966 }, { "epoch": 39.140845070422536, "grad_norm": 0.6417010426521301, "learning_rate": 2.0892000000000003e-05, "loss": 0.0125, "step": 6967 }, { "epoch": 39.14647887323944, "grad_norm": 0.5092276334762573, "learning_rate": 2.0895000000000002e-05, "loss": 0.0157, "step": 6968 }, { "epoch": 39.15211267605634, "grad_norm": 0.4065658748149872, "learning_rate": 2.0898e-05, "loss": 0.0172, "step": 6969 }, { "epoch": 39.15774647887324, "grad_norm": 0.5819499492645264, "learning_rate": 2.0901e-05, "loss": 0.0188, "step": 6970 }, { "epoch": 39.16338028169014, "grad_norm": 0.37160399556159973, "learning_rate": 2.0904e-05, "loss": 0.0143, "step": 6971 }, { "epoch": 39.16901408450704, "grad_norm": 0.6002311110496521, "learning_rate": 2.0906999999999998e-05, "loss": 0.0564, "step": 6972 }, { "epoch": 39.17464788732394, "grad_norm": 0.5788276195526123, "learning_rate": 2.0909999999999998e-05, "loss": 0.0332, "step": 6973 }, { "epoch": 39.180281690140845, "grad_norm": 0.6972548961639404, "learning_rate": 2.0913e-05, "loss": 0.0193, "step": 6974 }, { "epoch": 39.18591549295775, "grad_norm": 0.744331419467926, "learning_rate": 2.0916e-05, "loss": 0.0707, "step": 6975 }, { "epoch": 39.19154929577465, "grad_norm": 0.4509507417678833, "learning_rate": 2.0919e-05, "loss": 0.0109, "step": 6976 }, { "epoch": 39.19718309859155, "grad_norm": 0.39170730113983154, "learning_rate": 2.0922e-05, "loss": 0.0241, "step": 6977 }, { "epoch": 39.202816901408454, "grad_norm": 
0.35179606080055237, "learning_rate": 2.0925e-05, "loss": 0.0248, "step": 6978 }, { "epoch": 39.20845070422535, "grad_norm": 0.5725632309913635, "learning_rate": 2.0928e-05, "loss": 0.0183, "step": 6979 }, { "epoch": 39.21408450704225, "grad_norm": 0.3584320545196533, "learning_rate": 2.0931e-05, "loss": 0.0086, "step": 6980 }, { "epoch": 39.219718309859154, "grad_norm": 0.4977027177810669, "learning_rate": 2.0934e-05, "loss": 0.0057, "step": 6981 }, { "epoch": 39.225352112676056, "grad_norm": 0.49265506863594055, "learning_rate": 2.0937e-05, "loss": 0.0513, "step": 6982 }, { "epoch": 39.23098591549296, "grad_norm": 0.4012896716594696, "learning_rate": 2.094e-05, "loss": 0.0093, "step": 6983 }, { "epoch": 39.23661971830986, "grad_norm": 0.4605891704559326, "learning_rate": 2.0943000000000003e-05, "loss": 0.0081, "step": 6984 }, { "epoch": 39.24225352112676, "grad_norm": 0.30886271595954895, "learning_rate": 2.0946000000000002e-05, "loss": 0.0057, "step": 6985 }, { "epoch": 39.247887323943665, "grad_norm": 0.6322938799858093, "learning_rate": 2.0949000000000002e-05, "loss": 0.0182, "step": 6986 }, { "epoch": 39.25352112676056, "grad_norm": 0.746375560760498, "learning_rate": 2.0952000000000002e-05, "loss": 0.0814, "step": 6987 }, { "epoch": 39.25915492957746, "grad_norm": 0.7024857997894287, "learning_rate": 2.0955000000000002e-05, "loss": 0.1273, "step": 6988 }, { "epoch": 39.264788732394365, "grad_norm": 0.45854514837265015, "learning_rate": 2.0958e-05, "loss": 0.0553, "step": 6989 }, { "epoch": 39.27042253521127, "grad_norm": 0.520872175693512, "learning_rate": 2.0961e-05, "loss": 0.0642, "step": 6990 }, { "epoch": 39.27605633802817, "grad_norm": 0.617736279964447, "learning_rate": 2.0964e-05, "loss": 0.1146, "step": 6991 }, { "epoch": 39.28169014084507, "grad_norm": 0.9250397682189941, "learning_rate": 2.0967e-05, "loss": 0.08, "step": 6992 }, { "epoch": 39.287323943661974, "grad_norm": 0.5696887969970703, "learning_rate": 2.097e-05, "loss": 0.0472, "step": 6993 
}, { "epoch": 39.292957746478876, "grad_norm": 0.9004114866256714, "learning_rate": 2.0973e-05, "loss": 0.1269, "step": 6994 }, { "epoch": 39.29859154929577, "grad_norm": 0.5800080299377441, "learning_rate": 2.0976e-05, "loss": 0.0839, "step": 6995 }, { "epoch": 39.304225352112674, "grad_norm": 0.41982153058052063, "learning_rate": 2.0979e-05, "loss": 0.0293, "step": 6996 }, { "epoch": 39.309859154929576, "grad_norm": 0.5525029897689819, "learning_rate": 2.0982e-05, "loss": 0.0621, "step": 6997 }, { "epoch": 39.31549295774648, "grad_norm": 0.47864803671836853, "learning_rate": 2.0985e-05, "loss": 0.0352, "step": 6998 }, { "epoch": 39.32112676056338, "grad_norm": 0.5492961406707764, "learning_rate": 2.0988e-05, "loss": 0.0403, "step": 6999 }, { "epoch": 39.32676056338028, "grad_norm": 0.4997711181640625, "learning_rate": 2.0991e-05, "loss": 0.0625, "step": 7000 }, { "epoch": 39.32676056338028, "eval_cer": 0.11029888275397258, "eval_loss": 0.4393518269062042, "eval_runtime": 15.8646, "eval_samples_per_second": 19.162, "eval_steps_per_second": 0.63, "eval_wer": 0.3820030698388335, "step": 7000 }, { "epoch": 39.332394366197185, "grad_norm": 0.39277127385139465, "learning_rate": 2.0994e-05, "loss": 0.0289, "step": 7001 }, { "epoch": 39.33802816901409, "grad_norm": 0.44185754656791687, "learning_rate": 2.0997e-05, "loss": 0.0621, "step": 7002 }, { "epoch": 39.34366197183098, "grad_norm": 0.406377911567688, "learning_rate": 2.1e-05, "loss": 0.053, "step": 7003 }, { "epoch": 39.349295774647885, "grad_norm": 0.49477025866508484, "learning_rate": 2.1003e-05, "loss": 0.0405, "step": 7004 }, { "epoch": 39.35492957746479, "grad_norm": 0.7025056481361389, "learning_rate": 2.1006000000000002e-05, "loss": 0.0446, "step": 7005 }, { "epoch": 39.36056338028169, "grad_norm": 0.5486474633216858, "learning_rate": 2.1009e-05, "loss": 0.0174, "step": 7006 }, { "epoch": 39.36619718309859, "grad_norm": 1.919355869293213, "learning_rate": 2.1012e-05, "loss": 0.0148, "step": 7007 }, { 
"epoch": 39.371830985915494, "grad_norm": 0.6490148305892944, "learning_rate": 2.1015e-05, "loss": 0.0251, "step": 7008 }, { "epoch": 39.3774647887324, "grad_norm": 0.6395863890647888, "learning_rate": 2.1018e-05, "loss": 0.0455, "step": 7009 }, { "epoch": 39.3830985915493, "grad_norm": 0.5009157657623291, "learning_rate": 2.1021e-05, "loss": 0.0319, "step": 7010 }, { "epoch": 39.388732394366194, "grad_norm": 0.38221874833106995, "learning_rate": 2.1024e-05, "loss": 0.0147, "step": 7011 }, { "epoch": 39.394366197183096, "grad_norm": 0.6211214661598206, "learning_rate": 2.1027e-05, "loss": 0.0459, "step": 7012 }, { "epoch": 39.4, "grad_norm": 0.5114400386810303, "learning_rate": 2.103e-05, "loss": 0.0129, "step": 7013 }, { "epoch": 39.4056338028169, "grad_norm": 0.48928746581077576, "learning_rate": 2.1033e-05, "loss": 0.0145, "step": 7014 }, { "epoch": 39.4112676056338, "grad_norm": 0.8506745100021362, "learning_rate": 2.1036000000000003e-05, "loss": 0.0221, "step": 7015 }, { "epoch": 39.416901408450705, "grad_norm": 0.49922460317611694, "learning_rate": 2.1039000000000003e-05, "loss": 0.0139, "step": 7016 }, { "epoch": 39.42253521126761, "grad_norm": 0.33931106328964233, "learning_rate": 2.1042000000000003e-05, "loss": 0.0093, "step": 7017 }, { "epoch": 39.42816901408451, "grad_norm": 0.5611737370491028, "learning_rate": 2.1045e-05, "loss": 0.0223, "step": 7018 }, { "epoch": 39.433802816901405, "grad_norm": 0.6113772988319397, "learning_rate": 2.1048e-05, "loss": 0.0077, "step": 7019 }, { "epoch": 39.43943661971831, "grad_norm": 0.46669235825538635, "learning_rate": 2.1051e-05, "loss": 0.0337, "step": 7020 }, { "epoch": 39.44507042253521, "grad_norm": 0.537979245185852, "learning_rate": 2.1054e-05, "loss": 0.0136, "step": 7021 }, { "epoch": 39.45070422535211, "grad_norm": 0.44961026310920715, "learning_rate": 2.1057e-05, "loss": 0.0075, "step": 7022 }, { "epoch": 39.456338028169014, "grad_norm": 1.258878231048584, "learning_rate": 2.1059999999999998e-05, "loss": 
0.0754, "step": 7023 }, { "epoch": 39.46197183098592, "grad_norm": 0.44853687286376953, "learning_rate": 2.1062999999999998e-05, "loss": 0.0137, "step": 7024 }, { "epoch": 39.46760563380282, "grad_norm": 0.5169007182121277, "learning_rate": 2.1066e-05, "loss": 0.017, "step": 7025 }, { "epoch": 39.47323943661972, "grad_norm": 0.417328804731369, "learning_rate": 2.1069e-05, "loss": 0.0079, "step": 7026 }, { "epoch": 39.478873239436616, "grad_norm": 0.46349963545799255, "learning_rate": 2.1072e-05, "loss": 0.0208, "step": 7027 }, { "epoch": 39.48450704225352, "grad_norm": 0.5020545125007629, "learning_rate": 2.1075e-05, "loss": 0.0403, "step": 7028 }, { "epoch": 39.49014084507042, "grad_norm": 1.1184394359588623, "learning_rate": 2.1078e-05, "loss": 0.0312, "step": 7029 }, { "epoch": 39.49577464788732, "grad_norm": 0.3942098617553711, "learning_rate": 2.1081e-05, "loss": 0.0145, "step": 7030 }, { "epoch": 39.501408450704226, "grad_norm": 0.7548105716705322, "learning_rate": 2.1084e-05, "loss": 0.2072, "step": 7031 }, { "epoch": 39.50704225352113, "grad_norm": 0.6903153657913208, "learning_rate": 2.1087e-05, "loss": 0.0895, "step": 7032 }, { "epoch": 39.51267605633803, "grad_norm": 0.46426790952682495, "learning_rate": 2.109e-05, "loss": 0.0614, "step": 7033 }, { "epoch": 39.51830985915493, "grad_norm": 0.4998452961444855, "learning_rate": 2.1093e-05, "loss": 0.0747, "step": 7034 }, { "epoch": 39.52394366197183, "grad_norm": 0.8418598771095276, "learning_rate": 2.1096000000000003e-05, "loss": 0.0454, "step": 7035 }, { "epoch": 39.52957746478873, "grad_norm": 0.7046505808830261, "learning_rate": 2.1099000000000002e-05, "loss": 0.0623, "step": 7036 }, { "epoch": 39.53521126760563, "grad_norm": 0.4472900331020355, "learning_rate": 2.1102000000000002e-05, "loss": 0.0464, "step": 7037 }, { "epoch": 39.540845070422534, "grad_norm": 0.8147168755531311, "learning_rate": 2.1105000000000002e-05, "loss": 0.0426, "step": 7038 }, { "epoch": 39.54647887323944, "grad_norm": 
0.4927672743797302, "learning_rate": 2.1108000000000002e-05, "loss": 0.0429, "step": 7039 }, { "epoch": 39.55211267605634, "grad_norm": 0.5439338684082031, "learning_rate": 2.1111e-05, "loss": 0.0604, "step": 7040 }, { "epoch": 39.55774647887324, "grad_norm": 0.5325776934623718, "learning_rate": 2.1114e-05, "loss": 0.069, "step": 7041 }, { "epoch": 39.563380281690144, "grad_norm": 0.5106490254402161, "learning_rate": 2.1117e-05, "loss": 0.0415, "step": 7042 }, { "epoch": 39.56901408450704, "grad_norm": 0.8691709637641907, "learning_rate": 2.1119999999999998e-05, "loss": 0.0906, "step": 7043 }, { "epoch": 39.57464788732394, "grad_norm": 0.4854414761066437, "learning_rate": 2.1122999999999997e-05, "loss": 0.0411, "step": 7044 }, { "epoch": 39.58028169014084, "grad_norm": 0.48537641763687134, "learning_rate": 2.1126e-05, "loss": 0.0494, "step": 7045 }, { "epoch": 39.585915492957746, "grad_norm": 0.49664849042892456, "learning_rate": 2.1129e-05, "loss": 0.0253, "step": 7046 }, { "epoch": 39.59154929577465, "grad_norm": 0.7244488596916199, "learning_rate": 2.1132e-05, "loss": 0.0273, "step": 7047 }, { "epoch": 39.59718309859155, "grad_norm": 0.43302592635154724, "learning_rate": 2.1135e-05, "loss": 0.0317, "step": 7048 }, { "epoch": 39.60281690140845, "grad_norm": 0.5143494606018066, "learning_rate": 2.1138e-05, "loss": 0.0269, "step": 7049 }, { "epoch": 39.608450704225355, "grad_norm": 0.7941577434539795, "learning_rate": 2.1141e-05, "loss": 0.0245, "step": 7050 }, { "epoch": 39.61408450704225, "grad_norm": 0.6604679226875305, "learning_rate": 2.1144e-05, "loss": 0.0206, "step": 7051 }, { "epoch": 39.61971830985915, "grad_norm": 0.48554039001464844, "learning_rate": 2.1147e-05, "loss": 0.0618, "step": 7052 }, { "epoch": 39.625352112676055, "grad_norm": 0.40817496180534363, "learning_rate": 2.115e-05, "loss": 0.0204, "step": 7053 }, { "epoch": 39.63098591549296, "grad_norm": 0.32231399416923523, "learning_rate": 2.1153e-05, "loss": 0.0123, "step": 7054 }, { "epoch": 
39.63661971830986, "grad_norm": 0.44376853108406067, "learning_rate": 2.1156000000000002e-05, "loss": 0.0184, "step": 7055 }, { "epoch": 39.64225352112676, "grad_norm": 0.4671339988708496, "learning_rate": 2.1159000000000002e-05, "loss": 0.0577, "step": 7056 }, { "epoch": 39.647887323943664, "grad_norm": 0.6680634021759033, "learning_rate": 2.1162e-05, "loss": 0.0228, "step": 7057 }, { "epoch": 39.653521126760566, "grad_norm": 0.7686365842819214, "learning_rate": 2.1165e-05, "loss": 0.0148, "step": 7058 }, { "epoch": 39.65915492957747, "grad_norm": 0.5177909731864929, "learning_rate": 2.1168e-05, "loss": 0.0153, "step": 7059 }, { "epoch": 39.66478873239436, "grad_norm": 0.5917974710464478, "learning_rate": 2.1171e-05, "loss": 0.0139, "step": 7060 }, { "epoch": 39.670422535211266, "grad_norm": 0.3662303388118744, "learning_rate": 2.1174e-05, "loss": 0.0082, "step": 7061 }, { "epoch": 39.67605633802817, "grad_norm": 0.463968425989151, "learning_rate": 2.1177e-05, "loss": 0.0444, "step": 7062 }, { "epoch": 39.68169014084507, "grad_norm": 0.6857075095176697, "learning_rate": 2.118e-05, "loss": 0.0535, "step": 7063 }, { "epoch": 39.68732394366197, "grad_norm": 0.3500816226005554, "learning_rate": 2.1183e-05, "loss": 0.0224, "step": 7064 }, { "epoch": 39.692957746478875, "grad_norm": 0.6937262415885925, "learning_rate": 2.1186000000000003e-05, "loss": 0.0302, "step": 7065 }, { "epoch": 39.69859154929578, "grad_norm": 0.4441058337688446, "learning_rate": 2.1189000000000003e-05, "loss": 0.0179, "step": 7066 }, { "epoch": 39.70422535211267, "grad_norm": 0.5563717484474182, "learning_rate": 2.1192e-05, "loss": 0.0092, "step": 7067 }, { "epoch": 39.709859154929575, "grad_norm": 0.34043705463409424, "learning_rate": 2.1195e-05, "loss": 0.0074, "step": 7068 }, { "epoch": 39.71549295774648, "grad_norm": 0.4467583894729614, "learning_rate": 2.1198e-05, "loss": 0.0249, "step": 7069 }, { "epoch": 39.72112676056338, "grad_norm": 0.5814791917800903, "learning_rate": 2.1201e-05, 
"loss": 0.0223, "step": 7070 }, { "epoch": 39.72676056338028, "grad_norm": 0.5910232067108154, "learning_rate": 2.1204e-05, "loss": 0.0102, "step": 7071 }, { "epoch": 39.732394366197184, "grad_norm": 0.4660097360610962, "learning_rate": 2.1207e-05, "loss": 0.0681, "step": 7072 }, { "epoch": 39.738028169014086, "grad_norm": 0.9971121549606323, "learning_rate": 2.121e-05, "loss": 0.0393, "step": 7073 }, { "epoch": 39.74366197183099, "grad_norm": 0.3295377492904663, "learning_rate": 2.1213e-05, "loss": 0.0045, "step": 7074 }, { "epoch": 39.74929577464789, "grad_norm": 0.6778653860092163, "learning_rate": 2.1216e-05, "loss": 0.093, "step": 7075 }, { "epoch": 39.754929577464786, "grad_norm": 0.6046915054321289, "learning_rate": 2.1219e-05, "loss": 0.0645, "step": 7076 }, { "epoch": 39.76056338028169, "grad_norm": 0.7605347037315369, "learning_rate": 2.1222e-05, "loss": 0.0927, "step": 7077 }, { "epoch": 39.76619718309859, "grad_norm": 0.5245162844657898, "learning_rate": 2.1225e-05, "loss": 0.059, "step": 7078 }, { "epoch": 39.77183098591549, "grad_norm": 0.5399230122566223, "learning_rate": 2.1228e-05, "loss": 0.0624, "step": 7079 }, { "epoch": 39.777464788732395, "grad_norm": 0.7588193416595459, "learning_rate": 2.1231e-05, "loss": 0.0668, "step": 7080 }, { "epoch": 39.7830985915493, "grad_norm": 1.010830283164978, "learning_rate": 2.1234e-05, "loss": 0.1211, "step": 7081 }, { "epoch": 39.7887323943662, "grad_norm": 0.46037229895591736, "learning_rate": 2.1237e-05, "loss": 0.0444, "step": 7082 }, { "epoch": 39.7943661971831, "grad_norm": 0.5602880716323853, "learning_rate": 2.124e-05, "loss": 0.0516, "step": 7083 }, { "epoch": 39.8, "grad_norm": 0.47242674231529236, "learning_rate": 2.1243e-05, "loss": 0.0523, "step": 7084 }, { "epoch": 39.8056338028169, "grad_norm": 0.4851509928703308, "learning_rate": 2.1246000000000003e-05, "loss": 0.0365, "step": 7085 }, { "epoch": 39.8112676056338, "grad_norm": 0.4728541672229767, "learning_rate": 2.1249000000000003e-05, "loss": 
0.0341, "step": 7086 }, { "epoch": 39.816901408450704, "grad_norm": 0.41112202405929565, "learning_rate": 2.1252000000000003e-05, "loss": 0.037, "step": 7087 }, { "epoch": 39.822535211267606, "grad_norm": 0.3808818757534027, "learning_rate": 2.1255000000000002e-05, "loss": 0.0203, "step": 7088 }, { "epoch": 39.82816901408451, "grad_norm": 0.6341428756713867, "learning_rate": 2.1258000000000002e-05, "loss": 0.0396, "step": 7089 }, { "epoch": 39.83380281690141, "grad_norm": 0.8634621500968933, "learning_rate": 2.1261000000000002e-05, "loss": 0.0551, "step": 7090 }, { "epoch": 39.83943661971831, "grad_norm": 0.3405787944793701, "learning_rate": 2.1264000000000002e-05, "loss": 0.0195, "step": 7091 }, { "epoch": 39.84507042253521, "grad_norm": 0.4699031710624695, "learning_rate": 2.1266999999999998e-05, "loss": 0.0259, "step": 7092 }, { "epoch": 39.85070422535211, "grad_norm": 0.5263028144836426, "learning_rate": 2.1269999999999998e-05, "loss": 0.0294, "step": 7093 }, { "epoch": 39.85633802816901, "grad_norm": 0.3954383432865143, "learning_rate": 2.1272999999999998e-05, "loss": 0.0156, "step": 7094 }, { "epoch": 39.861971830985915, "grad_norm": 0.5547692179679871, "learning_rate": 2.1276e-05, "loss": 0.0363, "step": 7095 }, { "epoch": 39.86760563380282, "grad_norm": 0.522724986076355, "learning_rate": 2.1279e-05, "loss": 0.0231, "step": 7096 }, { "epoch": 39.87323943661972, "grad_norm": 0.7222675681114197, "learning_rate": 2.1282e-05, "loss": 0.0235, "step": 7097 }, { "epoch": 39.87887323943662, "grad_norm": 0.4310052990913391, "learning_rate": 2.1285e-05, "loss": 0.0176, "step": 7098 }, { "epoch": 39.884507042253524, "grad_norm": 0.44840338826179504, "learning_rate": 2.1288e-05, "loss": 0.0437, "step": 7099 }, { "epoch": 39.89014084507042, "grad_norm": 0.5042721033096313, "learning_rate": 2.1291e-05, "loss": 0.0496, "step": 7100 }, { "epoch": 39.89577464788732, "grad_norm": 0.3853495121002197, "learning_rate": 2.1294e-05, "loss": 0.0146, "step": 7101 }, { "epoch": 
39.901408450704224, "grad_norm": 0.4886343479156494, "learning_rate": 2.1297e-05, "loss": 0.0185, "step": 7102 }, { "epoch": 39.907042253521126, "grad_norm": 0.2733897566795349, "learning_rate": 2.13e-05, "loss": 0.0096, "step": 7103 }, { "epoch": 39.91267605633803, "grad_norm": 0.3725746273994446, "learning_rate": 2.1303e-05, "loss": 0.035, "step": 7104 }, { "epoch": 39.91830985915493, "grad_norm": 0.5761329531669617, "learning_rate": 2.1306000000000002e-05, "loss": 0.0389, "step": 7105 }, { "epoch": 39.92394366197183, "grad_norm": 0.391465425491333, "learning_rate": 2.1309000000000002e-05, "loss": 0.0118, "step": 7106 }, { "epoch": 39.929577464788736, "grad_norm": 0.5347964763641357, "learning_rate": 2.1312000000000002e-05, "loss": 0.0528, "step": 7107 }, { "epoch": 39.93521126760563, "grad_norm": 0.5539050698280334, "learning_rate": 2.1315000000000002e-05, "loss": 0.0448, "step": 7108 }, { "epoch": 39.94084507042253, "grad_norm": 0.57712721824646, "learning_rate": 2.1318e-05, "loss": 0.0184, "step": 7109 }, { "epoch": 39.946478873239435, "grad_norm": 0.8562357425689697, "learning_rate": 2.1321e-05, "loss": 0.0308, "step": 7110 }, { "epoch": 39.95211267605634, "grad_norm": 0.6761751770973206, "learning_rate": 2.1324e-05, "loss": 0.0239, "step": 7111 }, { "epoch": 39.95774647887324, "grad_norm": 0.5991113185882568, "learning_rate": 2.1327e-05, "loss": 0.0449, "step": 7112 }, { "epoch": 39.96338028169014, "grad_norm": 0.5972670912742615, "learning_rate": 2.133e-05, "loss": 0.011, "step": 7113 }, { "epoch": 39.969014084507045, "grad_norm": 0.5339862704277039, "learning_rate": 2.1333e-05, "loss": 0.042, "step": 7114 }, { "epoch": 39.97464788732395, "grad_norm": 0.5252397060394287, "learning_rate": 2.1336000000000004e-05, "loss": 0.0121, "step": 7115 }, { "epoch": 39.98028169014084, "grad_norm": 0.4393249452114105, "learning_rate": 2.1339e-05, "loss": 0.0209, "step": 7116 }, { "epoch": 39.985915492957744, "grad_norm": 0.4526563584804535, "learning_rate": 2.1342e-05, 
"loss": 0.0095, "step": 7117 }, { "epoch": 39.99154929577465, "grad_norm": 0.3842989504337311, "learning_rate": 2.1345e-05, "loss": 0.0171, "step": 7118 }, { "epoch": 39.99718309859155, "grad_norm": 0.6042697429656982, "learning_rate": 2.1348e-05, "loss": 0.0422, "step": 7119 }, { "epoch": 40.0, "grad_norm": 0.2379719465970993, "learning_rate": 2.1351e-05, "loss": 0.0133, "step": 7120 }, { "epoch": 40.0056338028169, "grad_norm": 0.8447229266166687, "learning_rate": 2.1354e-05, "loss": 0.1741, "step": 7121 }, { "epoch": 40.011267605633805, "grad_norm": 0.5718019604682922, "learning_rate": 2.1357e-05, "loss": 0.0551, "step": 7122 }, { "epoch": 40.01690140845071, "grad_norm": 0.643542468547821, "learning_rate": 2.136e-05, "loss": 0.0768, "step": 7123 }, { "epoch": 40.02253521126761, "grad_norm": 0.4280613958835602, "learning_rate": 2.1363e-05, "loss": 0.0632, "step": 7124 }, { "epoch": 40.028169014084504, "grad_norm": 0.4524391293525696, "learning_rate": 2.1366000000000002e-05, "loss": 0.0773, "step": 7125 }, { "epoch": 40.03380281690141, "grad_norm": 1.2211836576461792, "learning_rate": 2.1369e-05, "loss": 0.0536, "step": 7126 }, { "epoch": 40.03943661971831, "grad_norm": 0.5010496973991394, "learning_rate": 2.1372e-05, "loss": 0.0496, "step": 7127 }, { "epoch": 40.04507042253521, "grad_norm": 0.4405420124530792, "learning_rate": 2.1375e-05, "loss": 0.0672, "step": 7128 }, { "epoch": 40.05070422535211, "grad_norm": 0.46523386240005493, "learning_rate": 2.1378e-05, "loss": 0.0366, "step": 7129 }, { "epoch": 40.056338028169016, "grad_norm": 0.4357774257659912, "learning_rate": 2.1381e-05, "loss": 0.03, "step": 7130 }, { "epoch": 40.06197183098592, "grad_norm": 0.5991832613945007, "learning_rate": 2.1384e-05, "loss": 0.0528, "step": 7131 }, { "epoch": 40.06760563380282, "grad_norm": 0.42492663860321045, "learning_rate": 2.1387e-05, "loss": 0.0231, "step": 7132 }, { "epoch": 40.073239436619716, "grad_norm": 0.4839966595172882, "learning_rate": 2.139e-05, "loss": 0.0396, 
"step": 7133 }, { "epoch": 40.07887323943662, "grad_norm": 0.3727223575115204, "learning_rate": 2.1393e-05, "loss": 0.0421, "step": 7134 }, { "epoch": 40.08450704225352, "grad_norm": 0.35589298605918884, "learning_rate": 2.1396e-05, "loss": 0.0367, "step": 7135 }, { "epoch": 40.09014084507042, "grad_norm": 0.48074716329574585, "learning_rate": 2.1399000000000003e-05, "loss": 0.0357, "step": 7136 }, { "epoch": 40.095774647887325, "grad_norm": 0.502296507358551, "learning_rate": 2.1402000000000003e-05, "loss": 0.062, "step": 7137 }, { "epoch": 40.10140845070423, "grad_norm": 0.7055134177207947, "learning_rate": 2.1405000000000003e-05, "loss": 0.0334, "step": 7138 }, { "epoch": 40.10704225352113, "grad_norm": 0.29221072793006897, "learning_rate": 2.1408000000000002e-05, "loss": 0.0146, "step": 7139 }, { "epoch": 40.11267605633803, "grad_norm": 0.48186904191970825, "learning_rate": 2.1411000000000002e-05, "loss": 0.0192, "step": 7140 }, { "epoch": 40.11830985915493, "grad_norm": 0.581623911857605, "learning_rate": 2.1414e-05, "loss": 0.0337, "step": 7141 }, { "epoch": 40.12394366197183, "grad_norm": 0.6366983652114868, "learning_rate": 2.1417e-05, "loss": 0.0301, "step": 7142 }, { "epoch": 40.12957746478873, "grad_norm": 0.889064610004425, "learning_rate": 2.1419999999999998e-05, "loss": 0.0401, "step": 7143 }, { "epoch": 40.135211267605634, "grad_norm": 0.6769999265670776, "learning_rate": 2.1422999999999998e-05, "loss": 0.02, "step": 7144 }, { "epoch": 40.140845070422536, "grad_norm": 0.638644814491272, "learning_rate": 2.1425999999999998e-05, "loss": 0.0091, "step": 7145 }, { "epoch": 40.14647887323944, "grad_norm": 0.36699378490448, "learning_rate": 2.1429e-05, "loss": 0.0241, "step": 7146 }, { "epoch": 40.15211267605634, "grad_norm": 0.38765543699264526, "learning_rate": 2.1432e-05, "loss": 0.0166, "step": 7147 }, { "epoch": 40.15774647887324, "grad_norm": 0.4328952431678772, "learning_rate": 2.1435e-05, "loss": 0.0182, "step": 7148 }, { "epoch": 
40.16338028169014, "grad_norm": 0.30416637659072876, "learning_rate": 2.1438e-05, "loss": 0.0083, "step": 7149 }, { "epoch": 40.16901408450704, "grad_norm": 0.6623621582984924, "learning_rate": 2.1441e-05, "loss": 0.0283, "step": 7150 }, { "epoch": 40.17464788732394, "grad_norm": 0.5505245923995972, "learning_rate": 2.1444e-05, "loss": 0.0127, "step": 7151 }, { "epoch": 40.180281690140845, "grad_norm": 0.6081250905990601, "learning_rate": 2.1447e-05, "loss": 0.0149, "step": 7152 }, { "epoch": 40.18591549295775, "grad_norm": 0.7445876002311707, "learning_rate": 2.145e-05, "loss": 0.0379, "step": 7153 }, { "epoch": 40.19154929577465, "grad_norm": 0.4476659297943115, "learning_rate": 2.1453e-05, "loss": 0.0341, "step": 7154 }, { "epoch": 40.19718309859155, "grad_norm": 0.5084972977638245, "learning_rate": 2.1456e-05, "loss": 0.0263, "step": 7155 }, { "epoch": 40.202816901408454, "grad_norm": 0.4474823474884033, "learning_rate": 2.1459000000000002e-05, "loss": 0.0182, "step": 7156 }, { "epoch": 40.20845070422535, "grad_norm": 0.3970654606819153, "learning_rate": 2.1462000000000002e-05, "loss": 0.0196, "step": 7157 }, { "epoch": 40.21408450704225, "grad_norm": 1.111177682876587, "learning_rate": 2.1465000000000002e-05, "loss": 0.0138, "step": 7158 }, { "epoch": 40.219718309859154, "grad_norm": 0.4719654321670532, "learning_rate": 2.1468000000000002e-05, "loss": 0.0248, "step": 7159 }, { "epoch": 40.225352112676056, "grad_norm": 1.4718738794326782, "learning_rate": 2.1471e-05, "loss": 0.0669, "step": 7160 }, { "epoch": 40.23098591549296, "grad_norm": 0.4876256287097931, "learning_rate": 2.1474e-05, "loss": 0.0167, "step": 7161 }, { "epoch": 40.23661971830986, "grad_norm": 0.3276064395904541, "learning_rate": 2.1477e-05, "loss": 0.0053, "step": 7162 }, { "epoch": 40.24225352112676, "grad_norm": 0.3793768584728241, "learning_rate": 2.148e-05, "loss": 0.0064, "step": 7163 }, { "epoch": 40.247887323943665, "grad_norm": 0.2881784439086914, "learning_rate": 2.1483e-05, "loss": 
0.0129, "step": 7164 }, { "epoch": 40.25352112676056, "grad_norm": 0.6334677934646606, "learning_rate": 2.1486e-05, "loss": 0.0871, "step": 7165 }, { "epoch": 40.25915492957746, "grad_norm": 0.5926594138145447, "learning_rate": 2.1489e-05, "loss": 0.0895, "step": 7166 }, { "epoch": 40.264788732394365, "grad_norm": 0.530018150806427, "learning_rate": 2.1492e-05, "loss": 0.0574, "step": 7167 }, { "epoch": 40.27042253521127, "grad_norm": 0.5542733669281006, "learning_rate": 2.1495e-05, "loss": 0.0708, "step": 7168 }, { "epoch": 40.27605633802817, "grad_norm": 0.5518811941146851, "learning_rate": 2.1498e-05, "loss": 0.059, "step": 7169 }, { "epoch": 40.28169014084507, "grad_norm": 0.5270456075668335, "learning_rate": 2.1501e-05, "loss": 0.0399, "step": 7170 }, { "epoch": 40.287323943661974, "grad_norm": 0.6375643014907837, "learning_rate": 2.1504e-05, "loss": 0.0933, "step": 7171 }, { "epoch": 40.292957746478876, "grad_norm": 0.5149639844894409, "learning_rate": 2.1507e-05, "loss": 0.0743, "step": 7172 }, { "epoch": 40.29859154929577, "grad_norm": 0.43957602977752686, "learning_rate": 2.151e-05, "loss": 0.0346, "step": 7173 }, { "epoch": 40.304225352112674, "grad_norm": 0.4500507712364197, "learning_rate": 2.1513e-05, "loss": 0.0545, "step": 7174 }, { "epoch": 40.309859154929576, "grad_norm": 0.6965996623039246, "learning_rate": 2.1516e-05, "loss": 0.031, "step": 7175 }, { "epoch": 40.31549295774648, "grad_norm": 0.41806885600090027, "learning_rate": 2.1519000000000002e-05, "loss": 0.0352, "step": 7176 }, { "epoch": 40.32112676056338, "grad_norm": 0.37929394841194153, "learning_rate": 2.1522e-05, "loss": 0.0682, "step": 7177 }, { "epoch": 40.32676056338028, "grad_norm": 0.47061407566070557, "learning_rate": 2.1525e-05, "loss": 0.0481, "step": 7178 }, { "epoch": 40.332394366197185, "grad_norm": 0.31762659549713135, "learning_rate": 2.1528e-05, "loss": 0.0207, "step": 7179 }, { "epoch": 40.33802816901409, "grad_norm": 0.47674840688705444, "learning_rate": 2.1531e-05, 
"loss": 0.0488, "step": 7180 }, { "epoch": 40.34366197183098, "grad_norm": 0.5373939275741577, "learning_rate": 2.1534e-05, "loss": 0.0191, "step": 7181 }, { "epoch": 40.349295774647885, "grad_norm": 0.3873714804649353, "learning_rate": 2.1537e-05, "loss": 0.0297, "step": 7182 }, { "epoch": 40.35492957746479, "grad_norm": 1.56401526927948, "learning_rate": 2.154e-05, "loss": 0.0317, "step": 7183 }, { "epoch": 40.36056338028169, "grad_norm": 0.8100740909576416, "learning_rate": 2.1543e-05, "loss": 0.0179, "step": 7184 }, { "epoch": 40.36619718309859, "grad_norm": 0.4313648045063019, "learning_rate": 2.1546e-05, "loss": 0.0258, "step": 7185 }, { "epoch": 40.371830985915494, "grad_norm": 0.3544413447380066, "learning_rate": 2.1549000000000003e-05, "loss": 0.0261, "step": 7186 }, { "epoch": 40.3774647887324, "grad_norm": 0.37589818239212036, "learning_rate": 2.1552000000000003e-05, "loss": 0.0386, "step": 7187 }, { "epoch": 40.3830985915493, "grad_norm": 0.49624067544937134, "learning_rate": 2.1555000000000003e-05, "loss": 0.016, "step": 7188 }, { "epoch": 40.388732394366194, "grad_norm": 0.49328845739364624, "learning_rate": 2.1558000000000003e-05, "loss": 0.0149, "step": 7189 }, { "epoch": 40.394366197183096, "grad_norm": 0.42776381969451904, "learning_rate": 2.1561e-05, "loss": 0.0245, "step": 7190 }, { "epoch": 40.4, "grad_norm": 0.6903371810913086, "learning_rate": 2.1564e-05, "loss": 0.0564, "step": 7191 }, { "epoch": 40.4056338028169, "grad_norm": 0.3743841052055359, "learning_rate": 2.1567e-05, "loss": 0.0179, "step": 7192 }, { "epoch": 40.4112676056338, "grad_norm": 0.5459771752357483, "learning_rate": 2.157e-05, "loss": 0.0399, "step": 7193 }, { "epoch": 40.416901408450705, "grad_norm": 0.669917106628418, "learning_rate": 2.1572999999999998e-05, "loss": 0.0183, "step": 7194 }, { "epoch": 40.42253521126761, "grad_norm": 0.44614699482917786, "learning_rate": 2.1575999999999998e-05, "loss": 0.0088, "step": 7195 }, { "epoch": 40.42816901408451, "grad_norm": 
0.34231024980545044, "learning_rate": 2.1579e-05, "loss": 0.0165, "step": 7196 }, { "epoch": 40.433802816901405, "grad_norm": 0.514539897441864, "learning_rate": 2.1582e-05, "loss": 0.0127, "step": 7197 }, { "epoch": 40.43943661971831, "grad_norm": 0.33348533511161804, "learning_rate": 2.1585e-05, "loss": 0.0478, "step": 7198 }, { "epoch": 40.44507042253521, "grad_norm": 0.4709072709083557, "learning_rate": 2.1588e-05, "loss": 0.0381, "step": 7199 }, { "epoch": 40.45070422535211, "grad_norm": 0.6439768671989441, "learning_rate": 2.1591e-05, "loss": 0.0212, "step": 7200 }, { "epoch": 40.456338028169014, "grad_norm": 0.4294296205043793, "learning_rate": 2.1594e-05, "loss": 0.0226, "step": 7201 }, { "epoch": 40.46197183098592, "grad_norm": 0.23453833162784576, "learning_rate": 2.1597e-05, "loss": 0.0065, "step": 7202 }, { "epoch": 40.46760563380282, "grad_norm": 0.38492029905319214, "learning_rate": 2.16e-05, "loss": 0.009, "step": 7203 }, { "epoch": 40.47323943661972, "grad_norm": 0.4478702247142792, "learning_rate": 2.1603e-05, "loss": 0.0063, "step": 7204 }, { "epoch": 40.478873239436616, "grad_norm": 0.26532095670700073, "learning_rate": 2.1606e-05, "loss": 0.0044, "step": 7205 }, { "epoch": 40.48450704225352, "grad_norm": 0.5906198620796204, "learning_rate": 2.1609000000000003e-05, "loss": 0.043, "step": 7206 }, { "epoch": 40.49014084507042, "grad_norm": 0.4752597510814667, "learning_rate": 2.1612000000000002e-05, "loss": 0.0084, "step": 7207 }, { "epoch": 40.49577464788732, "grad_norm": 0.9713632464408875, "learning_rate": 2.1615000000000002e-05, "loss": 0.0167, "step": 7208 }, { "epoch": 40.501408450704226, "grad_norm": 0.613592803478241, "learning_rate": 2.1618000000000002e-05, "loss": 0.0739, "step": 7209 }, { "epoch": 40.50704225352113, "grad_norm": 0.632597029209137, "learning_rate": 2.1621000000000002e-05, "loss": 0.0624, "step": 7210 }, { "epoch": 40.51267605633803, "grad_norm": 0.5637673139572144, "learning_rate": 2.1624e-05, "loss": 0.0581, "step": 7211 
}, { "epoch": 40.51830985915493, "grad_norm": 0.5646774768829346, "learning_rate": 2.1627e-05, "loss": 0.1105, "step": 7212 }, { "epoch": 40.52394366197183, "grad_norm": 0.5309867262840271, "learning_rate": 2.163e-05, "loss": 0.0698, "step": 7213 }, { "epoch": 40.52957746478873, "grad_norm": 0.4877185523509979, "learning_rate": 2.1633e-05, "loss": 0.062, "step": 7214 }, { "epoch": 40.53521126760563, "grad_norm": 0.43662524223327637, "learning_rate": 2.1635999999999997e-05, "loss": 0.0391, "step": 7215 }, { "epoch": 40.540845070422534, "grad_norm": 0.5607251524925232, "learning_rate": 2.1639e-05, "loss": 0.0521, "step": 7216 }, { "epoch": 40.54647887323944, "grad_norm": 0.758955717086792, "learning_rate": 2.1642e-05, "loss": 0.1074, "step": 7217 }, { "epoch": 40.55211267605634, "grad_norm": 0.4429689943790436, "learning_rate": 2.1645e-05, "loss": 0.0477, "step": 7218 }, { "epoch": 40.55774647887324, "grad_norm": 0.43690064549446106, "learning_rate": 2.1648e-05, "loss": 0.0306, "step": 7219 }, { "epoch": 40.563380281690144, "grad_norm": 0.5178461670875549, "learning_rate": 2.1651e-05, "loss": 0.0289, "step": 7220 }, { "epoch": 40.56901408450704, "grad_norm": 0.6984038949012756, "learning_rate": 2.1654e-05, "loss": 0.0312, "step": 7221 }, { "epoch": 40.57464788732394, "grad_norm": 0.6882136464118958, "learning_rate": 2.1657e-05, "loss": 0.0339, "step": 7222 }, { "epoch": 40.58028169014084, "grad_norm": 0.479965478181839, "learning_rate": 2.166e-05, "loss": 0.0467, "step": 7223 }, { "epoch": 40.585915492957746, "grad_norm": 0.44811418652534485, "learning_rate": 2.1663e-05, "loss": 0.0608, "step": 7224 }, { "epoch": 40.59154929577465, "grad_norm": 0.4509431719779968, "learning_rate": 2.1666e-05, "loss": 0.0264, "step": 7225 }, { "epoch": 40.59718309859155, "grad_norm": 0.4737272560596466, "learning_rate": 2.1669000000000002e-05, "loss": 0.0236, "step": 7226 }, { "epoch": 40.60281690140845, "grad_norm": 0.6757000684738159, "learning_rate": 2.1672000000000002e-05, "loss": 
0.0457, "step": 7227 }, { "epoch": 40.608450704225355, "grad_norm": 0.5078580379486084, "learning_rate": 2.1675e-05, "loss": 0.019, "step": 7228 }, { "epoch": 40.61408450704225, "grad_norm": 0.6035044193267822, "learning_rate": 2.1678e-05, "loss": 0.0214, "step": 7229 }, { "epoch": 40.61971830985915, "grad_norm": 0.4635510742664337, "learning_rate": 2.1681e-05, "loss": 0.015, "step": 7230 }, { "epoch": 40.625352112676055, "grad_norm": 0.5085580945014954, "learning_rate": 2.1684e-05, "loss": 0.0292, "step": 7231 }, { "epoch": 40.63098591549296, "grad_norm": 0.41871723532676697, "learning_rate": 2.1687e-05, "loss": 0.0129, "step": 7232 }, { "epoch": 40.63661971830986, "grad_norm": 1.0834201574325562, "learning_rate": 2.169e-05, "loss": 0.0144, "step": 7233 }, { "epoch": 40.64225352112676, "grad_norm": 0.7941882014274597, "learning_rate": 2.1693e-05, "loss": 0.0184, "step": 7234 }, { "epoch": 40.647887323943664, "grad_norm": 0.6716424822807312, "learning_rate": 2.1696e-05, "loss": 0.0176, "step": 7235 }, { "epoch": 40.653521126760566, "grad_norm": 0.4056658446788788, "learning_rate": 2.1699000000000003e-05, "loss": 0.0105, "step": 7236 }, { "epoch": 40.65915492957747, "grad_norm": 0.5428013801574707, "learning_rate": 2.1702000000000003e-05, "loss": 0.0221, "step": 7237 }, { "epoch": 40.66478873239436, "grad_norm": 0.816891610622406, "learning_rate": 2.1705000000000003e-05, "loss": 0.0401, "step": 7238 }, { "epoch": 40.670422535211266, "grad_norm": 1.0232573747634888, "learning_rate": 2.1708e-05, "loss": 0.0457, "step": 7239 }, { "epoch": 40.67605633802817, "grad_norm": 0.449572890996933, "learning_rate": 2.1711e-05, "loss": 0.0252, "step": 7240 }, { "epoch": 40.68169014084507, "grad_norm": 0.4730805456638336, "learning_rate": 2.1714e-05, "loss": 0.0491, "step": 7241 }, { "epoch": 40.68732394366197, "grad_norm": 0.5541530251502991, "learning_rate": 2.1717e-05, "loss": 0.0122, "step": 7242 }, { "epoch": 40.692957746478875, "grad_norm": 0.41596660017967224, 
"learning_rate": 2.172e-05, "loss": 0.015, "step": 7243 }, { "epoch": 40.69859154929578, "grad_norm": 0.5371163487434387, "learning_rate": 2.1723e-05, "loss": 0.0127, "step": 7244 }, { "epoch": 40.70422535211267, "grad_norm": 0.3921475410461426, "learning_rate": 2.1726e-05, "loss": 0.0397, "step": 7245 }, { "epoch": 40.709859154929575, "grad_norm": 0.6130651831626892, "learning_rate": 2.1729e-05, "loss": 0.0323, "step": 7246 }, { "epoch": 40.71549295774648, "grad_norm": 0.646409809589386, "learning_rate": 2.1732e-05, "loss": 0.0159, "step": 7247 }, { "epoch": 40.72112676056338, "grad_norm": 0.5833065509796143, "learning_rate": 2.1735e-05, "loss": 0.033, "step": 7248 }, { "epoch": 40.72676056338028, "grad_norm": 0.3713236153125763, "learning_rate": 2.1738e-05, "loss": 0.0075, "step": 7249 }, { "epoch": 40.732394366197184, "grad_norm": 0.38839974999427795, "learning_rate": 2.1741e-05, "loss": 0.0182, "step": 7250 }, { "epoch": 40.738028169014086, "grad_norm": 0.5810709595680237, "learning_rate": 2.1744e-05, "loss": 0.0929, "step": 7251 }, { "epoch": 40.74366197183099, "grad_norm": 0.37198835611343384, "learning_rate": 2.1747e-05, "loss": 0.0146, "step": 7252 }, { "epoch": 40.74929577464789, "grad_norm": 0.8013471961021423, "learning_rate": 2.175e-05, "loss": 0.142, "step": 7253 }, { "epoch": 40.754929577464786, "grad_norm": 0.736466646194458, "learning_rate": 2.1753e-05, "loss": 0.1006, "step": 7254 }, { "epoch": 40.76056338028169, "grad_norm": 0.6761038303375244, "learning_rate": 2.1756e-05, "loss": 0.0867, "step": 7255 }, { "epoch": 40.76619718309859, "grad_norm": 0.47302675247192383, "learning_rate": 2.1759e-05, "loss": 0.0672, "step": 7256 }, { "epoch": 40.77183098591549, "grad_norm": 0.6134636998176575, "learning_rate": 2.1762000000000003e-05, "loss": 0.0478, "step": 7257 }, { "epoch": 40.777464788732395, "grad_norm": 0.7362503409385681, "learning_rate": 2.1765000000000003e-05, "loss": 0.088, "step": 7258 }, { "epoch": 40.7830985915493, "grad_norm": 
0.5194374918937683, "learning_rate": 2.1768000000000002e-05, "loss": 0.061, "step": 7259 }, { "epoch": 40.7887323943662, "grad_norm": 0.5743239521980286, "learning_rate": 2.1771000000000002e-05, "loss": 0.1001, "step": 7260 }, { "epoch": 40.7943661971831, "grad_norm": 0.5738633275032043, "learning_rate": 2.1774000000000002e-05, "loss": 0.0396, "step": 7261 }, { "epoch": 40.8, "grad_norm": 0.6775990128517151, "learning_rate": 2.1777000000000002e-05, "loss": 0.0699, "step": 7262 }, { "epoch": 40.8056338028169, "grad_norm": 0.66319340467453, "learning_rate": 2.178e-05, "loss": 0.0533, "step": 7263 }, { "epoch": 40.8112676056338, "grad_norm": 0.37667131423950195, "learning_rate": 2.1782999999999998e-05, "loss": 0.025, "step": 7264 }, { "epoch": 40.816901408450704, "grad_norm": 0.5921575427055359, "learning_rate": 2.1785999999999998e-05, "loss": 0.1015, "step": 7265 }, { "epoch": 40.822535211267606, "grad_norm": 0.3889005780220032, "learning_rate": 2.1788999999999998e-05, "loss": 0.0188, "step": 7266 }, { "epoch": 40.82816901408451, "grad_norm": 0.647361159324646, "learning_rate": 2.1792e-05, "loss": 0.0451, "step": 7267 }, { "epoch": 40.83380281690141, "grad_norm": 0.4459315836429596, "learning_rate": 2.1795e-05, "loss": 0.037, "step": 7268 }, { "epoch": 40.83943661971831, "grad_norm": 0.6539285182952881, "learning_rate": 2.1798e-05, "loss": 0.0212, "step": 7269 }, { "epoch": 40.84507042253521, "grad_norm": 0.3927265405654907, "learning_rate": 2.1801e-05, "loss": 0.022, "step": 7270 }, { "epoch": 40.85070422535211, "grad_norm": 0.4090675711631775, "learning_rate": 2.1804e-05, "loss": 0.0293, "step": 7271 }, { "epoch": 40.85633802816901, "grad_norm": 0.35799115896224976, "learning_rate": 2.1807e-05, "loss": 0.0152, "step": 7272 }, { "epoch": 40.861971830985915, "grad_norm": 0.684632420539856, "learning_rate": 2.181e-05, "loss": 0.036, "step": 7273 }, { "epoch": 40.86760563380282, "grad_norm": 0.37529316544532776, "learning_rate": 2.1813e-05, "loss": 0.0347, "step": 7274 
}, { "epoch": 40.87323943661972, "grad_norm": 0.4109133183956146, "learning_rate": 2.1816e-05, "loss": 0.0323, "step": 7275 }, { "epoch": 40.87887323943662, "grad_norm": 0.45916974544525146, "learning_rate": 2.1819e-05, "loss": 0.0141, "step": 7276 }, { "epoch": 40.884507042253524, "grad_norm": 0.6354519724845886, "learning_rate": 2.1822000000000002e-05, "loss": 0.0546, "step": 7277 }, { "epoch": 40.89014084507042, "grad_norm": 0.3846895098686218, "learning_rate": 2.1825000000000002e-05, "loss": 0.0126, "step": 7278 }, { "epoch": 40.89577464788732, "grad_norm": 0.47118356823921204, "learning_rate": 2.1828000000000002e-05, "loss": 0.0186, "step": 7279 }, { "epoch": 40.901408450704224, "grad_norm": 0.3080807030200958, "learning_rate": 2.1831e-05, "loss": 0.0104, "step": 7280 }, { "epoch": 40.907042253521126, "grad_norm": 0.3731978237628937, "learning_rate": 2.1834e-05, "loss": 0.014, "step": 7281 }, { "epoch": 40.91267605633803, "grad_norm": 0.4179995656013489, "learning_rate": 2.1837e-05, "loss": 0.0236, "step": 7282 }, { "epoch": 40.91830985915493, "grad_norm": 0.4967740476131439, "learning_rate": 2.184e-05, "loss": 0.0096, "step": 7283 }, { "epoch": 40.92394366197183, "grad_norm": 1.0967016220092773, "learning_rate": 2.1843e-05, "loss": 0.0688, "step": 7284 }, { "epoch": 40.929577464788736, "grad_norm": 0.28844910860061646, "learning_rate": 2.1846e-05, "loss": 0.0302, "step": 7285 }, { "epoch": 40.93521126760563, "grad_norm": 0.3858742415904999, "learning_rate": 2.1849e-05, "loss": 0.0392, "step": 7286 }, { "epoch": 40.94084507042253, "grad_norm": 0.2977699935436249, "learning_rate": 2.1852000000000004e-05, "loss": 0.0251, "step": 7287 }, { "epoch": 40.946478873239435, "grad_norm": 0.5298717021942139, "learning_rate": 2.1855e-05, "loss": 0.0161, "step": 7288 }, { "epoch": 40.95211267605634, "grad_norm": 1.0125477313995361, "learning_rate": 2.1858e-05, "loss": 0.0144, "step": 7289 }, { "epoch": 40.95774647887324, "grad_norm": 0.35466256737709045, "learning_rate": 
2.1861e-05, "loss": 0.0195, "step": 7290 }, { "epoch": 40.96338028169014, "grad_norm": 0.4197901785373688, "learning_rate": 2.1864e-05, "loss": 0.023, "step": 7291 }, { "epoch": 40.969014084507045, "grad_norm": 0.4610864222049713, "learning_rate": 2.1867e-05, "loss": 0.0087, "step": 7292 }, { "epoch": 40.97464788732395, "grad_norm": 0.4224700331687927, "learning_rate": 2.187e-05, "loss": 0.0151, "step": 7293 }, { "epoch": 40.98028169014084, "grad_norm": 0.6696301102638245, "learning_rate": 2.1873e-05, "loss": 0.0594, "step": 7294 }, { "epoch": 40.985915492957744, "grad_norm": 0.9926684498786926, "learning_rate": 2.1876e-05, "loss": 0.0089, "step": 7295 }, { "epoch": 40.99154929577465, "grad_norm": 0.36854735016822815, "learning_rate": 2.1879e-05, "loss": 0.0068, "step": 7296 }, { "epoch": 40.99718309859155, "grad_norm": 0.4118436276912689, "learning_rate": 2.1882e-05, "loss": 0.0194, "step": 7297 }, { "epoch": 41.0, "grad_norm": 1.0033135414123535, "learning_rate": 2.1885e-05, "loss": 0.0036, "step": 7298 }, { "epoch": 41.0056338028169, "grad_norm": 0.9898669123649597, "learning_rate": 2.1888e-05, "loss": 0.1764, "step": 7299 }, { "epoch": 41.011267605633805, "grad_norm": 0.6242010593414307, "learning_rate": 2.1891e-05, "loss": 0.0675, "step": 7300 }, { "epoch": 41.01690140845071, "grad_norm": 0.5101968050003052, "learning_rate": 2.1894e-05, "loss": 0.044, "step": 7301 }, { "epoch": 41.02253521126761, "grad_norm": 0.6621572375297546, "learning_rate": 2.1897e-05, "loss": 0.0926, "step": 7302 }, { "epoch": 41.028169014084504, "grad_norm": 0.47020184993743896, "learning_rate": 2.19e-05, "loss": 0.0886, "step": 7303 }, { "epoch": 41.03380281690141, "grad_norm": 0.5109107494354248, "learning_rate": 2.1903e-05, "loss": 0.0511, "step": 7304 }, { "epoch": 41.03943661971831, "grad_norm": 0.37169912457466125, "learning_rate": 2.1906e-05, "loss": 0.0317, "step": 7305 }, { "epoch": 41.04507042253521, "grad_norm": 0.492925763130188, "learning_rate": 2.1909e-05, "loss": 0.0322, 
"step": 7306 }, { "epoch": 41.05070422535211, "grad_norm": 0.8137713074684143, "learning_rate": 2.1912000000000003e-05, "loss": 0.0444, "step": 7307 }, { "epoch": 41.056338028169016, "grad_norm": 0.5124775171279907, "learning_rate": 2.1915000000000003e-05, "loss": 0.0546, "step": 7308 }, { "epoch": 41.06197183098592, "grad_norm": 0.8181898593902588, "learning_rate": 2.1918000000000003e-05, "loss": 0.0438, "step": 7309 }, { "epoch": 41.06760563380282, "grad_norm": 0.6376608610153198, "learning_rate": 2.1921000000000002e-05, "loss": 0.0406, "step": 7310 }, { "epoch": 41.073239436619716, "grad_norm": 0.6506083011627197, "learning_rate": 2.1924000000000002e-05, "loss": 0.083, "step": 7311 }, { "epoch": 41.07887323943662, "grad_norm": 0.47925442457199097, "learning_rate": 2.1927000000000002e-05, "loss": 0.039, "step": 7312 }, { "epoch": 41.08450704225352, "grad_norm": 0.5144093036651611, "learning_rate": 2.193e-05, "loss": 0.0154, "step": 7313 }, { "epoch": 41.09014084507042, "grad_norm": 0.5094054341316223, "learning_rate": 2.1932999999999998e-05, "loss": 0.0477, "step": 7314 }, { "epoch": 41.095774647887325, "grad_norm": 0.4652869999408722, "learning_rate": 2.1935999999999998e-05, "loss": 0.0278, "step": 7315 }, { "epoch": 41.10140845070423, "grad_norm": 0.5474463701248169, "learning_rate": 2.1938999999999998e-05, "loss": 0.0558, "step": 7316 }, { "epoch": 41.10704225352113, "grad_norm": 0.4273635745048523, "learning_rate": 2.1942e-05, "loss": 0.0185, "step": 7317 }, { "epoch": 41.11267605633803, "grad_norm": 0.4756261706352234, "learning_rate": 2.1945e-05, "loss": 0.0184, "step": 7318 }, { "epoch": 41.11830985915493, "grad_norm": 0.38917455077171326, "learning_rate": 2.1948e-05, "loss": 0.013, "step": 7319 }, { "epoch": 41.12394366197183, "grad_norm": 0.4594959020614624, "learning_rate": 2.1951e-05, "loss": 0.0173, "step": 7320 }, { "epoch": 41.12957746478873, "grad_norm": 0.36795905232429504, "learning_rate": 2.1954e-05, "loss": 0.0382, "step": 7321 }, { "epoch": 
41.135211267605634, "grad_norm": 0.3712332844734192, "learning_rate": 2.1957e-05, "loss": 0.0086, "step": 7322 }, { "epoch": 41.140845070422536, "grad_norm": 0.4777301251888275, "learning_rate": 2.196e-05, "loss": 0.0215, "step": 7323 }, { "epoch": 41.14647887323944, "grad_norm": 0.916012704372406, "learning_rate": 2.1963e-05, "loss": 0.0241, "step": 7324 }, { "epoch": 41.15211267605634, "grad_norm": 0.4114127457141876, "learning_rate": 2.1966e-05, "loss": 0.0101, "step": 7325 }, { "epoch": 41.15774647887324, "grad_norm": 0.5708482265472412, "learning_rate": 2.1969e-05, "loss": 0.0151, "step": 7326 }, { "epoch": 41.16338028169014, "grad_norm": 0.338919997215271, "learning_rate": 2.1972000000000002e-05, "loss": 0.01, "step": 7327 }, { "epoch": 41.16901408450704, "grad_norm": 0.3943823277950287, "learning_rate": 2.1975000000000002e-05, "loss": 0.04, "step": 7328 }, { "epoch": 41.17464788732394, "grad_norm": 0.4395487606525421, "learning_rate": 2.1978000000000002e-05, "loss": 0.0295, "step": 7329 }, { "epoch": 41.180281690140845, "grad_norm": 0.8027469515800476, "learning_rate": 2.1981000000000002e-05, "loss": 0.0869, "step": 7330 }, { "epoch": 41.18591549295775, "grad_norm": 0.6709253191947937, "learning_rate": 2.1984e-05, "loss": 0.0609, "step": 7331 }, { "epoch": 41.19154929577465, "grad_norm": 0.31614696979522705, "learning_rate": 2.1987e-05, "loss": 0.0153, "step": 7332 }, { "epoch": 41.19718309859155, "grad_norm": 0.7606804370880127, "learning_rate": 2.199e-05, "loss": 0.0215, "step": 7333 }, { "epoch": 41.202816901408454, "grad_norm": 0.7237105369567871, "learning_rate": 2.1993e-05, "loss": 0.0167, "step": 7334 }, { "epoch": 41.20845070422535, "grad_norm": 0.6069414019584656, "learning_rate": 2.1996e-05, "loss": 0.0243, "step": 7335 }, { "epoch": 41.21408450704225, "grad_norm": 0.8469422459602356, "learning_rate": 2.1999e-05, "loss": 0.0329, "step": 7336 }, { "epoch": 41.219718309859154, "grad_norm": 0.3824959099292755, "learning_rate": 2.2002e-05, "loss": 
0.0165, "step": 7337 }, { "epoch": 41.225352112676056, "grad_norm": 0.56353360414505, "learning_rate": 2.2005e-05, "loss": 0.0167, "step": 7338 }, { "epoch": 41.23098591549296, "grad_norm": 0.4120638966560364, "learning_rate": 2.2008e-05, "loss": 0.0058, "step": 7339 }, { "epoch": 41.23661971830986, "grad_norm": 0.21951113641262054, "learning_rate": 2.2011e-05, "loss": 0.0039, "step": 7340 }, { "epoch": 41.24225352112676, "grad_norm": 0.890927255153656, "learning_rate": 2.2014e-05, "loss": 0.068, "step": 7341 }, { "epoch": 41.247887323943665, "grad_norm": 0.44290691614151, "learning_rate": 2.2017e-05, "loss": 0.0131, "step": 7342 }, { "epoch": 41.25352112676056, "grad_norm": 0.7158246636390686, "learning_rate": 2.202e-05, "loss": 0.0883, "step": 7343 }, { "epoch": 41.25915492957746, "grad_norm": 0.48387399315834045, "learning_rate": 2.2023e-05, "loss": 0.0499, "step": 7344 }, { "epoch": 41.264788732394365, "grad_norm": 0.6160509586334229, "learning_rate": 2.2026e-05, "loss": 0.074, "step": 7345 }, { "epoch": 41.27042253521127, "grad_norm": 1.0213394165039062, "learning_rate": 2.2029e-05, "loss": 0.0626, "step": 7346 }, { "epoch": 41.27605633802817, "grad_norm": 0.5953682661056519, "learning_rate": 2.2032000000000002e-05, "loss": 0.037, "step": 7347 }, { "epoch": 41.28169014084507, "grad_norm": 0.5291324257850647, "learning_rate": 2.2035e-05, "loss": 0.0558, "step": 7348 }, { "epoch": 41.287323943661974, "grad_norm": 0.5266879796981812, "learning_rate": 2.2038e-05, "loss": 0.1123, "step": 7349 }, { "epoch": 41.292957746478876, "grad_norm": 0.5514253973960876, "learning_rate": 2.2041e-05, "loss": 0.0528, "step": 7350 }, { "epoch": 41.29859154929577, "grad_norm": 0.5147024989128113, "learning_rate": 2.2044e-05, "loss": 0.0807, "step": 7351 }, { "epoch": 41.304225352112674, "grad_norm": 0.32103875279426575, "learning_rate": 2.2047e-05, "loss": 0.019, "step": 7352 }, { "epoch": 41.309859154929576, "grad_norm": 0.38996464014053345, "learning_rate": 2.205e-05, "loss": 
0.0462, "step": 7353 }, { "epoch": 41.31549295774648, "grad_norm": 0.34832727909088135, "learning_rate": 2.2053e-05, "loss": 0.0266, "step": 7354 }, { "epoch": 41.32112676056338, "grad_norm": 0.9495737552642822, "learning_rate": 2.2056e-05, "loss": 0.0788, "step": 7355 }, { "epoch": 41.32676056338028, "grad_norm": 0.33857056498527527, "learning_rate": 2.2059e-05, "loss": 0.0237, "step": 7356 }, { "epoch": 41.332394366197185, "grad_norm": 0.6272710561752319, "learning_rate": 2.2062000000000003e-05, "loss": 0.0326, "step": 7357 }, { "epoch": 41.33802816901409, "grad_norm": 0.4501407742500305, "learning_rate": 2.2065000000000003e-05, "loss": 0.0502, "step": 7358 }, { "epoch": 41.34366197183098, "grad_norm": 0.42354726791381836, "learning_rate": 2.2068000000000003e-05, "loss": 0.0308, "step": 7359 }, { "epoch": 41.349295774647885, "grad_norm": 0.8243660926818848, "learning_rate": 2.2071000000000003e-05, "loss": 0.0386, "step": 7360 }, { "epoch": 41.35492957746479, "grad_norm": 0.4902227222919464, "learning_rate": 2.2074000000000002e-05, "loss": 0.0207, "step": 7361 }, { "epoch": 41.36056338028169, "grad_norm": 0.3059861660003662, "learning_rate": 2.2077e-05, "loss": 0.0124, "step": 7362 }, { "epoch": 41.36619718309859, "grad_norm": 0.3509253263473511, "learning_rate": 2.208e-05, "loss": 0.0264, "step": 7363 }, { "epoch": 41.371830985915494, "grad_norm": 0.4614291489124298, "learning_rate": 2.2083e-05, "loss": 0.0278, "step": 7364 }, { "epoch": 41.3774647887324, "grad_norm": 0.8305753469467163, "learning_rate": 2.2085999999999998e-05, "loss": 0.0642, "step": 7365 }, { "epoch": 41.3830985915493, "grad_norm": 0.43430081009864807, "learning_rate": 2.2088999999999998e-05, "loss": 0.0289, "step": 7366 }, { "epoch": 41.388732394366194, "grad_norm": 0.35428690910339355, "learning_rate": 2.2092e-05, "loss": 0.0376, "step": 7367 }, { "epoch": 41.394366197183096, "grad_norm": 0.4388071894645691, "learning_rate": 2.2095e-05, "loss": 0.0246, "step": 7368 }, { "epoch": 41.4, 
"grad_norm": 0.4648820459842682, "learning_rate": 2.2098e-05, "loss": 0.0097, "step": 7369 }, { "epoch": 41.4056338028169, "grad_norm": 0.606419563293457, "learning_rate": 2.2101e-05, "loss": 0.0122, "step": 7370 }, { "epoch": 41.4112676056338, "grad_norm": 0.6295449137687683, "learning_rate": 2.2104e-05, "loss": 0.0121, "step": 7371 }, { "epoch": 41.416901408450705, "grad_norm": 0.9096497893333435, "learning_rate": 2.2107e-05, "loss": 0.015, "step": 7372 }, { "epoch": 41.42253521126761, "grad_norm": 0.460497111082077, "learning_rate": 2.211e-05, "loss": 0.0102, "step": 7373 }, { "epoch": 41.42816901408451, "grad_norm": 0.7912516593933105, "learning_rate": 2.2113e-05, "loss": 0.0281, "step": 7374 }, { "epoch": 41.433802816901405, "grad_norm": 0.5247809886932373, "learning_rate": 2.2116e-05, "loss": 0.0115, "step": 7375 }, { "epoch": 41.43943661971831, "grad_norm": 0.44276443123817444, "learning_rate": 2.2119e-05, "loss": 0.0228, "step": 7376 }, { "epoch": 41.44507042253521, "grad_norm": 0.43640902638435364, "learning_rate": 2.2122000000000003e-05, "loss": 0.0246, "step": 7377 }, { "epoch": 41.45070422535211, "grad_norm": 0.37731078267097473, "learning_rate": 2.2125000000000002e-05, "loss": 0.0125, "step": 7378 }, { "epoch": 41.456338028169014, "grad_norm": 0.3606593608856201, "learning_rate": 2.2128000000000002e-05, "loss": 0.0061, "step": 7379 }, { "epoch": 41.46197183098592, "grad_norm": 0.2654917538166046, "learning_rate": 2.2131000000000002e-05, "loss": 0.0066, "step": 7380 }, { "epoch": 41.46760563380282, "grad_norm": 0.390985369682312, "learning_rate": 2.2134000000000002e-05, "loss": 0.009, "step": 7381 }, { "epoch": 41.47323943661972, "grad_norm": 1.0798989534378052, "learning_rate": 2.2137e-05, "loss": 0.0213, "step": 7382 }, { "epoch": 41.478873239436616, "grad_norm": 0.4452832043170929, "learning_rate": 2.214e-05, "loss": 0.0157, "step": 7383 }, { "epoch": 41.48450704225352, "grad_norm": 0.7211203575134277, "learning_rate": 2.2143e-05, "loss": 0.0219, 
"step": 7384 }, { "epoch": 41.49014084507042, "grad_norm": 0.9520562887191772, "learning_rate": 2.2146e-05, "loss": 0.0095, "step": 7385 }, { "epoch": 41.49577464788732, "grad_norm": 1.2487355470657349, "learning_rate": 2.2149e-05, "loss": 0.0327, "step": 7386 }, { "epoch": 41.501408450704226, "grad_norm": 0.7173846960067749, "learning_rate": 2.2151999999999997e-05, "loss": 0.1452, "step": 7387 }, { "epoch": 41.50704225352113, "grad_norm": 0.5066644549369812, "learning_rate": 2.2155e-05, "loss": 0.0573, "step": 7388 }, { "epoch": 41.51267605633803, "grad_norm": 0.539594829082489, "learning_rate": 2.2158e-05, "loss": 0.0598, "step": 7389 }, { "epoch": 41.51830985915493, "grad_norm": 0.49055588245391846, "learning_rate": 2.2161e-05, "loss": 0.0703, "step": 7390 }, { "epoch": 41.52394366197183, "grad_norm": 0.5523117780685425, "learning_rate": 2.2164e-05, "loss": 0.0575, "step": 7391 }, { "epoch": 41.52957746478873, "grad_norm": 0.5373201966285706, "learning_rate": 2.2167e-05, "loss": 0.0677, "step": 7392 }, { "epoch": 41.53521126760563, "grad_norm": 0.5330915451049805, "learning_rate": 2.217e-05, "loss": 0.0439, "step": 7393 }, { "epoch": 41.540845070422534, "grad_norm": 0.6422502398490906, "learning_rate": 2.2173e-05, "loss": 0.0593, "step": 7394 }, { "epoch": 41.54647887323944, "grad_norm": 0.4478074908256531, "learning_rate": 2.2176e-05, "loss": 0.0293, "step": 7395 }, { "epoch": 41.55211267605634, "grad_norm": 0.47766101360321045, "learning_rate": 2.2179e-05, "loss": 0.026, "step": 7396 }, { "epoch": 41.55774647887324, "grad_norm": 0.4163079559803009, "learning_rate": 2.2182e-05, "loss": 0.0266, "step": 7397 }, { "epoch": 41.563380281690144, "grad_norm": 0.5868075489997864, "learning_rate": 2.2185000000000002e-05, "loss": 0.0308, "step": 7398 }, { "epoch": 41.56901408450704, "grad_norm": 0.5567490458488464, "learning_rate": 2.2188e-05, "loss": 0.0298, "step": 7399 }, { "epoch": 41.57464788732394, "grad_norm": 0.4416974186897278, "learning_rate": 2.2191e-05, 
"loss": 0.0509, "step": 7400 }, { "epoch": 41.58028169014084, "grad_norm": 0.4700062572956085, "learning_rate": 2.2194e-05, "loss": 0.0208, "step": 7401 }, { "epoch": 41.585915492957746, "grad_norm": 0.549470067024231, "learning_rate": 2.2197e-05, "loss": 0.0212, "step": 7402 }, { "epoch": 41.59154929577465, "grad_norm": 0.35693100094795227, "learning_rate": 2.22e-05, "loss": 0.016, "step": 7403 }, { "epoch": 41.59718309859155, "grad_norm": 0.3676251471042633, "learning_rate": 2.2203e-05, "loss": 0.025, "step": 7404 }, { "epoch": 41.60281690140845, "grad_norm": 0.338407039642334, "learning_rate": 2.2206e-05, "loss": 0.0142, "step": 7405 }, { "epoch": 41.608450704225355, "grad_norm": 0.42238759994506836, "learning_rate": 2.2209e-05, "loss": 0.0174, "step": 7406 }, { "epoch": 41.61408450704225, "grad_norm": 0.329033225774765, "learning_rate": 2.2212e-05, "loss": 0.0144, "step": 7407 }, { "epoch": 41.61971830985915, "grad_norm": 0.3183523118495941, "learning_rate": 2.2215000000000003e-05, "loss": 0.016, "step": 7408 }, { "epoch": 41.625352112676055, "grad_norm": 0.49124544858932495, "learning_rate": 2.2218000000000003e-05, "loss": 0.0146, "step": 7409 }, { "epoch": 41.63098591549296, "grad_norm": 0.43780672550201416, "learning_rate": 2.2221000000000003e-05, "loss": 0.0135, "step": 7410 }, { "epoch": 41.63661971830986, "grad_norm": 0.40981411933898926, "learning_rate": 2.2224e-05, "loss": 0.0275, "step": 7411 }, { "epoch": 41.64225352112676, "grad_norm": 0.4536800980567932, "learning_rate": 2.2227e-05, "loss": 0.0229, "step": 7412 }, { "epoch": 41.647887323943664, "grad_norm": 0.9796376824378967, "learning_rate": 2.223e-05, "loss": 0.0655, "step": 7413 }, { "epoch": 41.653521126760566, "grad_norm": 0.3480322062969208, "learning_rate": 2.2233e-05, "loss": 0.0121, "step": 7414 }, { "epoch": 41.65915492957747, "grad_norm": 0.2872413992881775, "learning_rate": 2.2236e-05, "loss": 0.0097, "step": 7415 }, { "epoch": 41.66478873239436, "grad_norm": 0.6365059614181519, 
"learning_rate": 2.2239e-05, "loss": 0.0538, "step": 7416 }, { "epoch": 41.670422535211266, "grad_norm": 0.5667309165000916, "learning_rate": 2.2241999999999998e-05, "loss": 0.0207, "step": 7417 }, { "epoch": 41.67605633802817, "grad_norm": 0.46407651901245117, "learning_rate": 2.2245e-05, "loss": 0.01, "step": 7418 }, { "epoch": 41.68169014084507, "grad_norm": 0.3405577838420868, "learning_rate": 2.2248e-05, "loss": 0.0114, "step": 7419 }, { "epoch": 41.68732394366197, "grad_norm": 0.4698214828968048, "learning_rate": 2.2251e-05, "loss": 0.0622, "step": 7420 }, { "epoch": 41.692957746478875, "grad_norm": 0.48994356393814087, "learning_rate": 2.2254e-05, "loss": 0.0297, "step": 7421 }, { "epoch": 41.69859154929578, "grad_norm": 0.36201798915863037, "learning_rate": 2.2257e-05, "loss": 0.0054, "step": 7422 }, { "epoch": 41.70422535211267, "grad_norm": 0.4496486783027649, "learning_rate": 2.226e-05, "loss": 0.0307, "step": 7423 }, { "epoch": 41.709859154929575, "grad_norm": 0.6476020216941833, "learning_rate": 2.2263e-05, "loss": 0.0324, "step": 7424 }, { "epoch": 41.71549295774648, "grad_norm": 0.45306816697120667, "learning_rate": 2.2266e-05, "loss": 0.0068, "step": 7425 }, { "epoch": 41.72112676056338, "grad_norm": 0.6007418036460876, "learning_rate": 2.2269e-05, "loss": 0.0323, "step": 7426 }, { "epoch": 41.72676056338028, "grad_norm": 0.41943180561065674, "learning_rate": 2.2272e-05, "loss": 0.0063, "step": 7427 }, { "epoch": 41.732394366197184, "grad_norm": 0.7763713598251343, "learning_rate": 2.2275000000000003e-05, "loss": 0.0159, "step": 7428 }, { "epoch": 41.738028169014086, "grad_norm": 0.245377317070961, "learning_rate": 2.2278000000000003e-05, "loss": 0.0053, "step": 7429 }, { "epoch": 41.74366197183099, "grad_norm": 0.5646254420280457, "learning_rate": 2.2281000000000002e-05, "loss": 0.0097, "step": 7430 }, { "epoch": 41.74929577464789, "grad_norm": 0.6623941659927368, "learning_rate": 2.2284000000000002e-05, "loss": 0.0816, "step": 7431 }, { "epoch": 
41.754929577464786, "grad_norm": 0.7322630882263184, "learning_rate": 2.2287000000000002e-05, "loss": 0.0839, "step": 7432 }, { "epoch": 41.76056338028169, "grad_norm": 0.5589054226875305, "learning_rate": 2.2290000000000002e-05, "loss": 0.0529, "step": 7433 }, { "epoch": 41.76619718309859, "grad_norm": 0.5063641667366028, "learning_rate": 2.2293e-05, "loss": 0.0651, "step": 7434 }, { "epoch": 41.77183098591549, "grad_norm": 0.5205628871917725, "learning_rate": 2.2296e-05, "loss": 0.069, "step": 7435 }, { "epoch": 41.777464788732395, "grad_norm": 0.5120084285736084, "learning_rate": 2.2298999999999998e-05, "loss": 0.0757, "step": 7436 }, { "epoch": 41.7830985915493, "grad_norm": 0.49558699131011963, "learning_rate": 2.2301999999999998e-05, "loss": 0.0507, "step": 7437 }, { "epoch": 41.7887323943662, "grad_norm": 0.5282271504402161, "learning_rate": 2.2305e-05, "loss": 0.0763, "step": 7438 }, { "epoch": 41.7943661971831, "grad_norm": 0.5021595358848572, "learning_rate": 2.2308e-05, "loss": 0.0907, "step": 7439 }, { "epoch": 41.8, "grad_norm": 0.4585444927215576, "learning_rate": 2.2311e-05, "loss": 0.0523, "step": 7440 }, { "epoch": 41.8056338028169, "grad_norm": 0.4321590065956116, "learning_rate": 2.2314e-05, "loss": 0.0415, "step": 7441 }, { "epoch": 41.8112676056338, "grad_norm": 0.44097399711608887, "learning_rate": 2.2317e-05, "loss": 0.0342, "step": 7442 }, { "epoch": 41.816901408450704, "grad_norm": 0.5200923681259155, "learning_rate": 2.232e-05, "loss": 0.0498, "step": 7443 }, { "epoch": 41.822535211267606, "grad_norm": 0.4615960717201233, "learning_rate": 2.2323e-05, "loss": 0.0402, "step": 7444 }, { "epoch": 41.82816901408451, "grad_norm": 0.48530614376068115, "learning_rate": 2.2326e-05, "loss": 0.0502, "step": 7445 }, { "epoch": 41.83380281690141, "grad_norm": 0.37238767743110657, "learning_rate": 2.2329e-05, "loss": 0.0264, "step": 7446 }, { "epoch": 41.83943661971831, "grad_norm": 1.1830722093582153, "learning_rate": 2.2332e-05, "loss": 0.0684, 
"step": 7447 }, { "epoch": 41.84507042253521, "grad_norm": 0.5106154680252075, "learning_rate": 2.2335000000000002e-05, "loss": 0.018, "step": 7448 }, { "epoch": 41.85070422535211, "grad_norm": 0.36259374022483826, "learning_rate": 2.2338000000000002e-05, "loss": 0.0154, "step": 7449 }, { "epoch": 41.85633802816901, "grad_norm": 0.5373011827468872, "learning_rate": 2.2341000000000002e-05, "loss": 0.0232, "step": 7450 }, { "epoch": 41.861971830985915, "grad_norm": 0.6343364715576172, "learning_rate": 2.2344e-05, "loss": 0.0479, "step": 7451 }, { "epoch": 41.86760563380282, "grad_norm": 0.40285637974739075, "learning_rate": 2.2347e-05, "loss": 0.0371, "step": 7452 }, { "epoch": 41.87323943661972, "grad_norm": 0.5997134447097778, "learning_rate": 2.235e-05, "loss": 0.032, "step": 7453 }, { "epoch": 41.87887323943662, "grad_norm": 0.3895038068294525, "learning_rate": 2.2353e-05, "loss": 0.0154, "step": 7454 }, { "epoch": 41.884507042253524, "grad_norm": 0.32587841153144836, "learning_rate": 2.2356e-05, "loss": 0.0113, "step": 7455 }, { "epoch": 41.89014084507042, "grad_norm": 0.440353125333786, "learning_rate": 2.2359e-05, "loss": 0.0177, "step": 7456 }, { "epoch": 41.89577464788732, "grad_norm": 0.5475175976753235, "learning_rate": 2.2362e-05, "loss": 0.0213, "step": 7457 }, { "epoch": 41.901408450704224, "grad_norm": 0.3844890296459198, "learning_rate": 2.2365000000000004e-05, "loss": 0.0093, "step": 7458 }, { "epoch": 41.907042253521126, "grad_norm": 0.8902540802955627, "learning_rate": 2.2368000000000003e-05, "loss": 0.0191, "step": 7459 }, { "epoch": 41.91267605633803, "grad_norm": 0.31582263112068176, "learning_rate": 2.2371e-05, "loss": 0.0138, "step": 7460 }, { "epoch": 41.91830985915493, "grad_norm": 0.4600745439529419, "learning_rate": 2.2374e-05, "loss": 0.0125, "step": 7461 }, { "epoch": 41.92394366197183, "grad_norm": 0.5684223771095276, "learning_rate": 2.2377e-05, "loss": 0.0112, "step": 7462 }, { "epoch": 41.929577464788736, "grad_norm": 
0.5319104194641113, "learning_rate": 2.238e-05, "loss": 0.0398, "step": 7463 }, { "epoch": 41.93521126760563, "grad_norm": 0.4022923707962036, "learning_rate": 2.2383e-05, "loss": 0.0316, "step": 7464 }, { "epoch": 41.94084507042253, "grad_norm": 0.337933748960495, "learning_rate": 2.2386e-05, "loss": 0.0108, "step": 7465 }, { "epoch": 41.946478873239435, "grad_norm": 0.639553427696228, "learning_rate": 2.2389e-05, "loss": 0.0195, "step": 7466 }, { "epoch": 41.95211267605634, "grad_norm": 0.5340489149093628, "learning_rate": 2.2392e-05, "loss": 0.0075, "step": 7467 }, { "epoch": 41.95774647887324, "grad_norm": 0.6593025326728821, "learning_rate": 2.2395e-05, "loss": 0.032, "step": 7468 }, { "epoch": 41.96338028169014, "grad_norm": 1.0286219120025635, "learning_rate": 2.2398e-05, "loss": 0.031, "step": 7469 }, { "epoch": 41.969014084507045, "grad_norm": 0.5558343529701233, "learning_rate": 2.2401e-05, "loss": 0.018, "step": 7470 }, { "epoch": 41.97464788732395, "grad_norm": 0.5247750282287598, "learning_rate": 2.2404e-05, "loss": 0.0093, "step": 7471 }, { "epoch": 41.98028169014084, "grad_norm": 0.4263032078742981, "learning_rate": 2.2407e-05, "loss": 0.0067, "step": 7472 }, { "epoch": 41.985915492957744, "grad_norm": 1.2081217765808105, "learning_rate": 2.241e-05, "loss": 0.0571, "step": 7473 }, { "epoch": 41.99154929577465, "grad_norm": 0.45464858412742615, "learning_rate": 2.2413e-05, "loss": 0.0096, "step": 7474 }, { "epoch": 41.99718309859155, "grad_norm": 0.6052510738372803, "learning_rate": 2.2416e-05, "loss": 0.0352, "step": 7475 }, { "epoch": 42.0, "grad_norm": 0.39129629731178284, "learning_rate": 2.2419e-05, "loss": 0.0606, "step": 7476 }, { "epoch": 42.0056338028169, "grad_norm": 0.5988507270812988, "learning_rate": 2.2422e-05, "loss": 0.1233, "step": 7477 }, { "epoch": 42.011267605633805, "grad_norm": 0.45876067876815796, "learning_rate": 2.2425000000000003e-05, "loss": 0.0449, "step": 7478 }, { "epoch": 42.01690140845071, "grad_norm": 
0.6483227014541626, "learning_rate": 2.2428000000000003e-05, "loss": 0.0578, "step": 7479 }, { "epoch": 42.02253521126761, "grad_norm": 0.6408881545066833, "learning_rate": 2.2431000000000003e-05, "loss": 0.0673, "step": 7480 }, { "epoch": 42.028169014084504, "grad_norm": 0.4474213421344757, "learning_rate": 2.2434000000000002e-05, "loss": 0.1153, "step": 7481 }, { "epoch": 42.03380281690141, "grad_norm": 0.7578486204147339, "learning_rate": 2.2437000000000002e-05, "loss": 0.056, "step": 7482 }, { "epoch": 42.03943661971831, "grad_norm": 0.46790099143981934, "learning_rate": 2.2440000000000002e-05, "loss": 0.0462, "step": 7483 }, { "epoch": 42.04507042253521, "grad_norm": 0.4951466917991638, "learning_rate": 2.2443000000000002e-05, "loss": 0.0765, "step": 7484 }, { "epoch": 42.05070422535211, "grad_norm": 0.5230402946472168, "learning_rate": 2.2445999999999998e-05, "loss": 0.0716, "step": 7485 }, { "epoch": 42.056338028169016, "grad_norm": 1.1577682495117188, "learning_rate": 2.2448999999999998e-05, "loss": 0.065, "step": 7486 }, { "epoch": 42.06197183098592, "grad_norm": 0.5868234038352966, "learning_rate": 2.2451999999999998e-05, "loss": 0.0422, "step": 7487 }, { "epoch": 42.06760563380282, "grad_norm": 0.7654135227203369, "learning_rate": 2.2455e-05, "loss": 0.0341, "step": 7488 }, { "epoch": 42.073239436619716, "grad_norm": 0.36027079820632935, "learning_rate": 2.2458e-05, "loss": 0.0209, "step": 7489 }, { "epoch": 42.07887323943662, "grad_norm": 0.5802940726280212, "learning_rate": 2.2461e-05, "loss": 0.0675, "step": 7490 }, { "epoch": 42.08450704225352, "grad_norm": 0.4752560257911682, "learning_rate": 2.2464e-05, "loss": 0.0235, "step": 7491 }, { "epoch": 42.09014084507042, "grad_norm": 0.46027565002441406, "learning_rate": 2.2467e-05, "loss": 0.0348, "step": 7492 }, { "epoch": 42.095774647887325, "grad_norm": 0.5083492994308472, "learning_rate": 2.247e-05, "loss": 0.0202, "step": 7493 }, { "epoch": 42.10140845070423, "grad_norm": 0.32019373774528503, 
"learning_rate": 2.2473e-05, "loss": 0.0164, "step": 7494 }, { "epoch": 42.10704225352113, "grad_norm": 0.5501285195350647, "learning_rate": 2.2476e-05, "loss": 0.0326, "step": 7495 }, { "epoch": 42.11267605633803, "grad_norm": 0.4517219364643097, "learning_rate": 2.2479e-05, "loss": 0.0138, "step": 7496 }, { "epoch": 42.11830985915493, "grad_norm": 0.3765527904033661, "learning_rate": 2.2482e-05, "loss": 0.0157, "step": 7497 }, { "epoch": 42.12394366197183, "grad_norm": 0.625866711139679, "learning_rate": 2.2485000000000002e-05, "loss": 0.0218, "step": 7498 }, { "epoch": 42.12957746478873, "grad_norm": 0.2990606725215912, "learning_rate": 2.2488000000000002e-05, "loss": 0.0207, "step": 7499 }, { "epoch": 42.135211267605634, "grad_norm": 0.7698860168457031, "learning_rate": 2.2491000000000002e-05, "loss": 0.0369, "step": 7500 }, { "epoch": 42.140845070422536, "grad_norm": 0.5465412139892578, "learning_rate": 2.2494000000000002e-05, "loss": 0.0403, "step": 7501 }, { "epoch": 42.14647887323944, "grad_norm": 0.4785187840461731, "learning_rate": 2.2497e-05, "loss": 0.0182, "step": 7502 }, { "epoch": 42.15211267605634, "grad_norm": 0.4458351135253906, "learning_rate": 2.25e-05, "loss": 0.0485, "step": 7503 }, { "epoch": 42.15774647887324, "grad_norm": 0.4266981780529022, "learning_rate": 2.2503e-05, "loss": 0.0158, "step": 7504 }, { "epoch": 42.16338028169014, "grad_norm": 0.3471778631210327, "learning_rate": 2.2506e-05, "loss": 0.0068, "step": 7505 }, { "epoch": 42.16901408450704, "grad_norm": 0.43547722697257996, "learning_rate": 2.2509e-05, "loss": 0.0273, "step": 7506 }, { "epoch": 42.17464788732394, "grad_norm": 0.7164992690086365, "learning_rate": 2.2512e-05, "loss": 0.0303, "step": 7507 }, { "epoch": 42.180281690140845, "grad_norm": 0.5679174065589905, "learning_rate": 2.2515e-05, "loss": 0.0511, "step": 7508 }, { "epoch": 42.18591549295775, "grad_norm": 0.3377740979194641, "learning_rate": 2.2518e-05, "loss": 0.0214, "step": 7509 }, { "epoch": 42.19154929577465, 
"grad_norm": 0.3742673397064209, "learning_rate": 2.2521e-05, "loss": 0.0173, "step": 7510 }, { "epoch": 42.19718309859155, "grad_norm": 0.3603390157222748, "learning_rate": 2.2524e-05, "loss": 0.0203, "step": 7511 }, { "epoch": 42.202816901408454, "grad_norm": 0.47893381118774414, "learning_rate": 2.2527e-05, "loss": 0.0095, "step": 7512 }, { "epoch": 42.20845070422535, "grad_norm": 0.5194800496101379, "learning_rate": 2.253e-05, "loss": 0.0165, "step": 7513 }, { "epoch": 42.21408450704225, "grad_norm": 0.45012998580932617, "learning_rate": 2.2533e-05, "loss": 0.0273, "step": 7514 }, { "epoch": 42.219718309859154, "grad_norm": 0.5191600322723389, "learning_rate": 2.2536e-05, "loss": 0.0316, "step": 7515 }, { "epoch": 42.225352112676056, "grad_norm": 1.207686424255371, "learning_rate": 2.2539e-05, "loss": 0.0491, "step": 7516 }, { "epoch": 42.23098591549296, "grad_norm": 0.3129006326198578, "learning_rate": 2.2542e-05, "loss": 0.0049, "step": 7517 }, { "epoch": 42.23661971830986, "grad_norm": 0.33701950311660767, "learning_rate": 2.2545e-05, "loss": 0.0137, "step": 7518 }, { "epoch": 42.24225352112676, "grad_norm": 0.5580872297286987, "learning_rate": 2.2548e-05, "loss": 0.0417, "step": 7519 }, { "epoch": 42.247887323943665, "grad_norm": 0.541022002696991, "learning_rate": 2.2551e-05, "loss": 0.0188, "step": 7520 }, { "epoch": 42.25352112676056, "grad_norm": 0.7988286018371582, "learning_rate": 2.2554e-05, "loss": 0.1536, "step": 7521 }, { "epoch": 42.25915492957746, "grad_norm": 0.6303605437278748, "learning_rate": 2.2557e-05, "loss": 0.1071, "step": 7522 }, { "epoch": 42.264788732394365, "grad_norm": 0.47527793049812317, "learning_rate": 2.256e-05, "loss": 0.0429, "step": 7523 }, { "epoch": 42.27042253521127, "grad_norm": 0.3876914381980896, "learning_rate": 2.2563e-05, "loss": 0.05, "step": 7524 }, { "epoch": 42.27605633802817, "grad_norm": 0.622723400592804, "learning_rate": 2.2566e-05, "loss": 0.0594, "step": 7525 }, { "epoch": 42.28169014084507, "grad_norm": 
0.7066499590873718, "learning_rate": 2.2569e-05, "loss": 0.0551, "step": 7526 }, { "epoch": 42.287323943661974, "grad_norm": 0.5086488127708435, "learning_rate": 2.2572e-05, "loss": 0.0551, "step": 7527 }, { "epoch": 42.292957746478876, "grad_norm": 0.5035583972930908, "learning_rate": 2.2575e-05, "loss": 0.0358, "step": 7528 }, { "epoch": 42.29859154929577, "grad_norm": 0.5174965858459473, "learning_rate": 2.2578000000000003e-05, "loss": 0.0474, "step": 7529 }, { "epoch": 42.304225352112674, "grad_norm": 0.7174997925758362, "learning_rate": 2.2581000000000003e-05, "loss": 0.0471, "step": 7530 }, { "epoch": 42.309859154929576, "grad_norm": 0.3904140889644623, "learning_rate": 2.2584000000000003e-05, "loss": 0.0254, "step": 7531 }, { "epoch": 42.31549295774648, "grad_norm": 0.4571654796600342, "learning_rate": 2.2587000000000002e-05, "loss": 0.0242, "step": 7532 }, { "epoch": 42.32112676056338, "grad_norm": 0.4291767179965973, "learning_rate": 2.2590000000000002e-05, "loss": 0.0346, "step": 7533 }, { "epoch": 42.32676056338028, "grad_norm": 0.5634200572967529, "learning_rate": 2.2593e-05, "loss": 0.0629, "step": 7534 }, { "epoch": 42.332394366197185, "grad_norm": 0.38613197207450867, "learning_rate": 2.2596e-05, "loss": 0.0282, "step": 7535 }, { "epoch": 42.33802816901409, "grad_norm": 0.6797226667404175, "learning_rate": 2.2598999999999998e-05, "loss": 0.074, "step": 7536 }, { "epoch": 42.34366197183098, "grad_norm": 1.3571751117706299, "learning_rate": 2.2601999999999998e-05, "loss": 0.0209, "step": 7537 }, { "epoch": 42.349295774647885, "grad_norm": 0.5683722496032715, "learning_rate": 2.2604999999999998e-05, "loss": 0.0339, "step": 7538 }, { "epoch": 42.35492957746479, "grad_norm": 0.5399428009986877, "learning_rate": 2.2608e-05, "loss": 0.0118, "step": 7539 }, { "epoch": 42.36056338028169, "grad_norm": 0.349140465259552, "learning_rate": 2.2611e-05, "loss": 0.0122, "step": 7540 }, { "epoch": 42.36619718309859, "grad_norm": 0.5243947505950928, "learning_rate": 
2.2614e-05, "loss": 0.053, "step": 7541 }, { "epoch": 42.371830985915494, "grad_norm": 0.43679893016815186, "learning_rate": 2.2617e-05, "loss": 0.0229, "step": 7542 }, { "epoch": 42.3774647887324, "grad_norm": 0.7319911122322083, "learning_rate": 2.262e-05, "loss": 0.0447, "step": 7543 }, { "epoch": 42.3830985915493, "grad_norm": 0.27181893587112427, "learning_rate": 2.2623e-05, "loss": 0.0087, "step": 7544 }, { "epoch": 42.388732394366194, "grad_norm": 0.45792385935783386, "learning_rate": 2.2626e-05, "loss": 0.0241, "step": 7545 }, { "epoch": 42.394366197183096, "grad_norm": 0.3754235506057739, "learning_rate": 2.2629e-05, "loss": 0.0183, "step": 7546 }, { "epoch": 42.4, "grad_norm": 0.5734699964523315, "learning_rate": 2.2632e-05, "loss": 0.0244, "step": 7547 }, { "epoch": 42.4056338028169, "grad_norm": 0.41143882274627686, "learning_rate": 2.2635e-05, "loss": 0.0107, "step": 7548 }, { "epoch": 42.4112676056338, "grad_norm": 0.9116105437278748, "learning_rate": 2.2638000000000002e-05, "loss": 0.025, "step": 7549 }, { "epoch": 42.416901408450705, "grad_norm": 0.38116201758384705, "learning_rate": 2.2641000000000002e-05, "loss": 0.0136, "step": 7550 }, { "epoch": 42.42253521126761, "grad_norm": 0.24951201677322388, "learning_rate": 2.2644000000000002e-05, "loss": 0.0069, "step": 7551 }, { "epoch": 42.42816901408451, "grad_norm": 0.4167141020298004, "learning_rate": 2.2647000000000002e-05, "loss": 0.0116, "step": 7552 }, { "epoch": 42.433802816901405, "grad_norm": 0.4539431035518646, "learning_rate": 2.265e-05, "loss": 0.0448, "step": 7553 }, { "epoch": 42.43943661971831, "grad_norm": 0.5163365006446838, "learning_rate": 2.2653e-05, "loss": 0.0275, "step": 7554 }, { "epoch": 42.44507042253521, "grad_norm": 0.6498066782951355, "learning_rate": 2.2656e-05, "loss": 0.0191, "step": 7555 }, { "epoch": 42.45070422535211, "grad_norm": 0.42669886350631714, "learning_rate": 2.2659e-05, "loss": 0.0135, "step": 7556 }, { "epoch": 42.456338028169014, "grad_norm": 
0.41020041704177856, "learning_rate": 2.2662e-05, "loss": 0.0108, "step": 7557 }, { "epoch": 42.46197183098592, "grad_norm": 0.42957958579063416, "learning_rate": 2.2665e-05, "loss": 0.0079, "step": 7558 }, { "epoch": 42.46760563380282, "grad_norm": 1.562150239944458, "learning_rate": 2.2668e-05, "loss": 0.0157, "step": 7559 }, { "epoch": 42.47323943661972, "grad_norm": 1.1488326787948608, "learning_rate": 2.2671e-05, "loss": 0.0274, "step": 7560 }, { "epoch": 42.478873239436616, "grad_norm": 0.6322613954544067, "learning_rate": 2.2674e-05, "loss": 0.0199, "step": 7561 }, { "epoch": 42.48450704225352, "grad_norm": 0.38417506217956543, "learning_rate": 2.2677e-05, "loss": 0.0069, "step": 7562 }, { "epoch": 42.49014084507042, "grad_norm": 0.29200321435928345, "learning_rate": 2.268e-05, "loss": 0.0031, "step": 7563 }, { "epoch": 42.49577464788732, "grad_norm": 0.567292332649231, "learning_rate": 2.2683e-05, "loss": 0.0194, "step": 7564 }, { "epoch": 42.501408450704226, "grad_norm": 0.8918532133102417, "learning_rate": 2.2686e-05, "loss": 0.091, "step": 7565 }, { "epoch": 42.50704225352113, "grad_norm": 0.6278206706047058, "learning_rate": 2.2689e-05, "loss": 0.0675, "step": 7566 }, { "epoch": 42.51267605633803, "grad_norm": 0.5797451734542847, "learning_rate": 2.2692e-05, "loss": 0.0776, "step": 7567 }, { "epoch": 42.51830985915493, "grad_norm": 0.5330057740211487, "learning_rate": 2.2695e-05, "loss": 0.0539, "step": 7568 }, { "epoch": 42.52394366197183, "grad_norm": 0.5872822403907776, "learning_rate": 2.2698000000000002e-05, "loss": 0.0603, "step": 7569 }, { "epoch": 42.52957746478873, "grad_norm": 0.46162718534469604, "learning_rate": 2.2701000000000002e-05, "loss": 0.0405, "step": 7570 }, { "epoch": 42.53521126760563, "grad_norm": 0.6301929950714111, "learning_rate": 2.2704e-05, "loss": 0.0327, "step": 7571 }, { "epoch": 42.540845070422534, "grad_norm": 0.9593560099601746, "learning_rate": 2.2707e-05, "loss": 0.0989, "step": 7572 }, { "epoch": 42.54647887323944, 
"grad_norm": 0.439179927110672, "learning_rate": 2.271e-05, "loss": 0.0284, "step": 7573 }, { "epoch": 42.55211267605634, "grad_norm": 0.5184466242790222, "learning_rate": 2.2713e-05, "loss": 0.0291, "step": 7574 }, { "epoch": 42.55774647887324, "grad_norm": 0.5212823152542114, "learning_rate": 2.2716e-05, "loss": 0.0344, "step": 7575 }, { "epoch": 42.563380281690144, "grad_norm": 0.7226054072380066, "learning_rate": 2.2719e-05, "loss": 0.0389, "step": 7576 }, { "epoch": 42.56901408450704, "grad_norm": 0.5342902541160583, "learning_rate": 2.2722e-05, "loss": 0.0465, "step": 7577 }, { "epoch": 42.57464788732394, "grad_norm": 0.4800014793872833, "learning_rate": 2.2725e-05, "loss": 0.0216, "step": 7578 }, { "epoch": 42.58028169014084, "grad_norm": 0.5431079268455505, "learning_rate": 2.2728000000000003e-05, "loss": 0.031, "step": 7579 }, { "epoch": 42.585915492957746, "grad_norm": 0.5016093850135803, "learning_rate": 2.2731000000000003e-05, "loss": 0.0519, "step": 7580 }, { "epoch": 42.59154929577465, "grad_norm": 0.3969337046146393, "learning_rate": 2.2734000000000003e-05, "loss": 0.0265, "step": 7581 }, { "epoch": 42.59718309859155, "grad_norm": 0.7396127581596375, "learning_rate": 2.2737000000000003e-05, "loss": 0.0413, "step": 7582 }, { "epoch": 42.60281690140845, "grad_norm": 0.5934298634529114, "learning_rate": 2.274e-05, "loss": 0.0265, "step": 7583 }, { "epoch": 42.608450704225355, "grad_norm": 0.6929453015327454, "learning_rate": 2.2743e-05, "loss": 0.0244, "step": 7584 }, { "epoch": 42.61408450704225, "grad_norm": 0.4412064254283905, "learning_rate": 2.2746e-05, "loss": 0.0266, "step": 7585 }, { "epoch": 42.61971830985915, "grad_norm": 0.38507190346717834, "learning_rate": 2.2749e-05, "loss": 0.0169, "step": 7586 }, { "epoch": 42.625352112676055, "grad_norm": 1.883690595626831, "learning_rate": 2.2752e-05, "loss": 0.049, "step": 7587 }, { "epoch": 42.63098591549296, "grad_norm": 0.406414270401001, "learning_rate": 2.2754999999999998e-05, "loss": 0.0167, 
"step": 7588 }, { "epoch": 42.63661971830986, "grad_norm": 0.36991238594055176, "learning_rate": 2.2758e-05, "loss": 0.0153, "step": 7589 }, { "epoch": 42.64225352112676, "grad_norm": 0.44890010356903076, "learning_rate": 2.2761e-05, "loss": 0.0199, "step": 7590 }, { "epoch": 42.647887323943664, "grad_norm": 0.4985082149505615, "learning_rate": 2.2764e-05, "loss": 0.0108, "step": 7591 }, { "epoch": 42.653521126760566, "grad_norm": 0.591476559638977, "learning_rate": 2.2767e-05, "loss": 0.0206, "step": 7592 }, { "epoch": 42.65915492957747, "grad_norm": 0.4985911548137665, "learning_rate": 2.277e-05, "loss": 0.02, "step": 7593 }, { "epoch": 42.66478873239436, "grad_norm": 0.8128790855407715, "learning_rate": 2.2773e-05, "loss": 0.0374, "step": 7594 }, { "epoch": 42.670422535211266, "grad_norm": 0.3646816313266754, "learning_rate": 2.2776e-05, "loss": 0.0102, "step": 7595 }, { "epoch": 42.67605633802817, "grad_norm": 0.4376397728919983, "learning_rate": 2.2779e-05, "loss": 0.0172, "step": 7596 }, { "epoch": 42.68169014084507, "grad_norm": 0.5908669233322144, "learning_rate": 2.2782e-05, "loss": 0.0313, "step": 7597 }, { "epoch": 42.68732394366197, "grad_norm": 0.3561352491378784, "learning_rate": 2.2785e-05, "loss": 0.0107, "step": 7598 }, { "epoch": 42.692957746478875, "grad_norm": 0.29853174090385437, "learning_rate": 2.2788000000000003e-05, "loss": 0.0169, "step": 7599 }, { "epoch": 42.69859154929578, "grad_norm": 0.8564784526824951, "learning_rate": 2.2791000000000003e-05, "loss": 0.0165, "step": 7600 }, { "epoch": 42.70422535211267, "grad_norm": 0.487270712852478, "learning_rate": 2.2794000000000002e-05, "loss": 0.0203, "step": 7601 }, { "epoch": 42.709859154929575, "grad_norm": 0.3231433033943176, "learning_rate": 2.2797000000000002e-05, "loss": 0.0077, "step": 7602 }, { "epoch": 42.71549295774648, "grad_norm": 0.37228256464004517, "learning_rate": 2.2800000000000002e-05, "loss": 0.0127, "step": 7603 }, { "epoch": 42.72112676056338, "grad_norm": 
0.36155253648757935, "learning_rate": 2.2803000000000002e-05, "loss": 0.0198, "step": 7604 }, { "epoch": 42.72676056338028, "grad_norm": 0.6274839639663696, "learning_rate": 2.2806e-05, "loss": 0.0104, "step": 7605 }, { "epoch": 42.732394366197184, "grad_norm": 0.6314296722412109, "learning_rate": 2.2809e-05, "loss": 0.0264, "step": 7606 }, { "epoch": 42.738028169014086, "grad_norm": 0.582661509513855, "learning_rate": 2.2812e-05, "loss": 0.0569, "step": 7607 }, { "epoch": 42.74366197183099, "grad_norm": 0.23447729647159576, "learning_rate": 2.2814999999999998e-05, "loss": 0.0026, "step": 7608 }, { "epoch": 42.74929577464789, "grad_norm": 0.5817813873291016, "learning_rate": 2.2818e-05, "loss": 0.0686, "step": 7609 }, { "epoch": 42.754929577464786, "grad_norm": 0.5315639972686768, "learning_rate": 2.2821e-05, "loss": 0.0698, "step": 7610 }, { "epoch": 42.76056338028169, "grad_norm": 0.5072256922721863, "learning_rate": 2.2824e-05, "loss": 0.0571, "step": 7611 }, { "epoch": 42.76619718309859, "grad_norm": 0.4988565444946289, "learning_rate": 2.2827e-05, "loss": 0.0988, "step": 7612 }, { "epoch": 42.77183098591549, "grad_norm": 0.7303663492202759, "learning_rate": 2.283e-05, "loss": 0.0684, "step": 7613 }, { "epoch": 42.777464788732395, "grad_norm": 0.46362096071243286, "learning_rate": 2.2833e-05, "loss": 0.0397, "step": 7614 }, { "epoch": 42.7830985915493, "grad_norm": 0.7120845913887024, "learning_rate": 2.2836e-05, "loss": 0.1051, "step": 7615 }, { "epoch": 42.7887323943662, "grad_norm": 0.9748067259788513, "learning_rate": 2.2839e-05, "loss": 0.0412, "step": 7616 }, { "epoch": 42.7943661971831, "grad_norm": 0.49141454696655273, "learning_rate": 2.2842e-05, "loss": 0.0548, "step": 7617 }, { "epoch": 42.8, "grad_norm": 0.5678000450134277, "learning_rate": 2.2845e-05, "loss": 0.0347, "step": 7618 }, { "epoch": 42.8056338028169, "grad_norm": 0.447583943605423, "learning_rate": 2.2848000000000002e-05, "loss": 0.0349, "step": 7619 }, { "epoch": 42.8112676056338, 
"grad_norm": 0.7057188153266907, "learning_rate": 2.2851000000000002e-05, "loss": 0.0423, "step": 7620 }, { "epoch": 42.816901408450704, "grad_norm": 0.5612456202507019, "learning_rate": 2.2854000000000002e-05, "loss": 0.0777, "step": 7621 }, { "epoch": 42.822535211267606, "grad_norm": 0.3823961317539215, "learning_rate": 2.2857e-05, "loss": 0.0373, "step": 7622 }, { "epoch": 42.82816901408451, "grad_norm": 0.7501652836799622, "learning_rate": 2.286e-05, "loss": 0.0431, "step": 7623 }, { "epoch": 42.83380281690141, "grad_norm": 0.5525383353233337, "learning_rate": 2.2863e-05, "loss": 0.0251, "step": 7624 }, { "epoch": 42.83943661971831, "grad_norm": 0.7335768342018127, "learning_rate": 2.2866e-05, "loss": 0.0222, "step": 7625 }, { "epoch": 42.84507042253521, "grad_norm": 0.6790626049041748, "learning_rate": 2.2869e-05, "loss": 0.0364, "step": 7626 }, { "epoch": 42.85070422535211, "grad_norm": 0.2618127465248108, "learning_rate": 2.2872e-05, "loss": 0.0103, "step": 7627 }, { "epoch": 42.85633802816901, "grad_norm": 0.413882851600647, "learning_rate": 2.2875e-05, "loss": 0.0161, "step": 7628 }, { "epoch": 42.861971830985915, "grad_norm": 0.6824060678482056, "learning_rate": 2.2878e-05, "loss": 0.0381, "step": 7629 }, { "epoch": 42.86760563380282, "grad_norm": 0.6541456580162048, "learning_rate": 2.2881000000000003e-05, "loss": 0.0191, "step": 7630 }, { "epoch": 42.87323943661972, "grad_norm": 0.6058534383773804, "learning_rate": 2.2884000000000003e-05, "loss": 0.025, "step": 7631 }, { "epoch": 42.87887323943662, "grad_norm": 0.48915794491767883, "learning_rate": 2.2887e-05, "loss": 0.0205, "step": 7632 }, { "epoch": 42.884507042253524, "grad_norm": 0.37270820140838623, "learning_rate": 2.289e-05, "loss": 0.0152, "step": 7633 }, { "epoch": 42.89014084507042, "grad_norm": 0.34175005555152893, "learning_rate": 2.2893e-05, "loss": 0.0206, "step": 7634 }, { "epoch": 42.89577464788732, "grad_norm": 0.6491833329200745, "learning_rate": 2.2896e-05, "loss": 0.0145, "step": 
7635 }, { "epoch": 42.901408450704224, "grad_norm": 0.2978573739528656, "learning_rate": 2.2899e-05, "loss": 0.0087, "step": 7636 }, { "epoch": 42.907042253521126, "grad_norm": 0.42922282218933105, "learning_rate": 2.2902e-05, "loss": 0.0237, "step": 7637 }, { "epoch": 42.91267605633803, "grad_norm": 0.5128122568130493, "learning_rate": 2.2905e-05, "loss": 0.0345, "step": 7638 }, { "epoch": 42.91830985915493, "grad_norm": 0.4425894021987915, "learning_rate": 2.2907999999999998e-05, "loss": 0.0116, "step": 7639 }, { "epoch": 42.92394366197183, "grad_norm": 0.4023649990558624, "learning_rate": 2.2911e-05, "loss": 0.0102, "step": 7640 }, { "epoch": 42.929577464788736, "grad_norm": 0.7394270300865173, "learning_rate": 2.2914e-05, "loss": 0.0572, "step": 7641 }, { "epoch": 42.93521126760563, "grad_norm": 0.5984401106834412, "learning_rate": 2.2917e-05, "loss": 0.0608, "step": 7642 }, { "epoch": 42.94084507042253, "grad_norm": 0.2024155855178833, "learning_rate": 2.292e-05, "loss": 0.0048, "step": 7643 }, { "epoch": 42.946478873239435, "grad_norm": 0.6814282536506653, "learning_rate": 2.2923e-05, "loss": 0.0384, "step": 7644 }, { "epoch": 42.95211267605634, "grad_norm": 0.4636407196521759, "learning_rate": 2.2926e-05, "loss": 0.0318, "step": 7645 }, { "epoch": 42.95774647887324, "grad_norm": 0.3854784369468689, "learning_rate": 2.2929e-05, "loss": 0.023, "step": 7646 }, { "epoch": 42.96338028169014, "grad_norm": 0.3978540599346161, "learning_rate": 2.2932e-05, "loss": 0.02, "step": 7647 }, { "epoch": 42.969014084507045, "grad_norm": 0.7154507637023926, "learning_rate": 2.2935e-05, "loss": 0.012, "step": 7648 }, { "epoch": 42.97464788732395, "grad_norm": 0.49148163199424744, "learning_rate": 2.2938e-05, "loss": 0.0188, "step": 7649 }, { "epoch": 42.98028169014084, "grad_norm": 0.5564149022102356, "learning_rate": 2.2941000000000003e-05, "loss": 0.015, "step": 7650 }, { "epoch": 42.985915492957744, "grad_norm": 0.3121998608112335, "learning_rate": 2.2944000000000003e-05, 
"loss": 0.0214, "step": 7651 }, { "epoch": 42.99154929577465, "grad_norm": 0.38416385650634766, "learning_rate": 2.2947000000000002e-05, "loss": 0.0096, "step": 7652 }, { "epoch": 42.99718309859155, "grad_norm": 0.3769761025905609, "learning_rate": 2.2950000000000002e-05, "loss": 0.0241, "step": 7653 }, { "epoch": 43.0, "grad_norm": 0.27575987577438354, "learning_rate": 2.2953000000000002e-05, "loss": 0.0145, "step": 7654 }, { "epoch": 43.0056338028169, "grad_norm": 0.8059839606285095, "learning_rate": 2.2956000000000002e-05, "loss": 0.1802, "step": 7655 }, { "epoch": 43.011267605633805, "grad_norm": 0.5550584197044373, "learning_rate": 2.2959e-05, "loss": 0.0653, "step": 7656 }, { "epoch": 43.01690140845071, "grad_norm": 0.7003200650215149, "learning_rate": 2.2961999999999998e-05, "loss": 0.0627, "step": 7657 }, { "epoch": 43.02253521126761, "grad_norm": 0.5093281269073486, "learning_rate": 2.2964999999999998e-05, "loss": 0.0555, "step": 7658 }, { "epoch": 43.028169014084504, "grad_norm": 0.5814769864082336, "learning_rate": 2.2967999999999998e-05, "loss": 0.0655, "step": 7659 }, { "epoch": 43.03380281690141, "grad_norm": 0.49074047803878784, "learning_rate": 2.2971e-05, "loss": 0.0433, "step": 7660 }, { "epoch": 43.03943661971831, "grad_norm": 0.6694944500923157, "learning_rate": 2.2974e-05, "loss": 0.0487, "step": 7661 }, { "epoch": 43.04507042253521, "grad_norm": 0.589338481426239, "learning_rate": 2.2977e-05, "loss": 0.054, "step": 7662 }, { "epoch": 43.05070422535211, "grad_norm": 0.7815316915512085, "learning_rate": 2.298e-05, "loss": 0.0891, "step": 7663 }, { "epoch": 43.056338028169016, "grad_norm": 0.42382708191871643, "learning_rate": 2.2983e-05, "loss": 0.0241, "step": 7664 }, { "epoch": 43.06197183098592, "grad_norm": 0.4550093412399292, "learning_rate": 2.2986e-05, "loss": 0.0256, "step": 7665 }, { "epoch": 43.06760563380282, "grad_norm": 0.47115153074264526, "learning_rate": 2.2989e-05, "loss": 0.0289, "step": 7666 }, { "epoch": 43.073239436619716, 
"grad_norm": 0.3900357186794281, "learning_rate": 2.2992e-05, "loss": 0.0248, "step": 7667 }, { "epoch": 43.07887323943662, "grad_norm": 0.3748089671134949, "learning_rate": 2.2995e-05, "loss": 0.0427, "step": 7668 }, { "epoch": 43.08450704225352, "grad_norm": 0.4321984052658081, "learning_rate": 2.2998e-05, "loss": 0.0484, "step": 7669 }, { "epoch": 43.09014084507042, "grad_norm": 0.5039902925491333, "learning_rate": 2.3001000000000002e-05, "loss": 0.0367, "step": 7670 }, { "epoch": 43.095774647887325, "grad_norm": 0.7104202508926392, "learning_rate": 2.3004000000000002e-05, "loss": 0.0161, "step": 7671 }, { "epoch": 43.10140845070423, "grad_norm": 0.42201659083366394, "learning_rate": 2.3007000000000002e-05, "loss": 0.0185, "step": 7672 }, { "epoch": 43.10704225352113, "grad_norm": 0.6787857413291931, "learning_rate": 2.301e-05, "loss": 0.0224, "step": 7673 }, { "epoch": 43.11267605633803, "grad_norm": 0.535172700881958, "learning_rate": 2.3013e-05, "loss": 0.0236, "step": 7674 }, { "epoch": 43.11830985915493, "grad_norm": 0.5176582336425781, "learning_rate": 2.3016e-05, "loss": 0.063, "step": 7675 }, { "epoch": 43.12394366197183, "grad_norm": 0.3857332766056061, "learning_rate": 2.3019e-05, "loss": 0.0212, "step": 7676 }, { "epoch": 43.12957746478873, "grad_norm": 0.38682156801223755, "learning_rate": 2.3022e-05, "loss": 0.0135, "step": 7677 }, { "epoch": 43.135211267605634, "grad_norm": 0.30483633279800415, "learning_rate": 2.3025e-05, "loss": 0.0068, "step": 7678 }, { "epoch": 43.140845070422536, "grad_norm": 0.6348659992218018, "learning_rate": 2.3028e-05, "loss": 0.0427, "step": 7679 }, { "epoch": 43.14647887323944, "grad_norm": 0.585061252117157, "learning_rate": 2.3031000000000004e-05, "loss": 0.0149, "step": 7680 }, { "epoch": 43.15211267605634, "grad_norm": 0.8832511305809021, "learning_rate": 2.3034e-05, "loss": 0.0493, "step": 7681 }, { "epoch": 43.15774647887324, "grad_norm": 0.7753055095672607, "learning_rate": 2.3037e-05, "loss": 0.0133, "step": 
7682 }, { "epoch": 43.16338028169014, "grad_norm": 0.16594181954860687, "learning_rate": 2.304e-05, "loss": 0.0056, "step": 7683 }, { "epoch": 43.16901408450704, "grad_norm": 0.7289854884147644, "learning_rate": 2.3043e-05, "loss": 0.0425, "step": 7684 }, { "epoch": 43.17464788732394, "grad_norm": 0.44492217898368835, "learning_rate": 2.3046e-05, "loss": 0.0119, "step": 7685 }, { "epoch": 43.180281690140845, "grad_norm": 0.5965486168861389, "learning_rate": 2.3049e-05, "loss": 0.0119, "step": 7686 }, { "epoch": 43.18591549295775, "grad_norm": 0.34880444407463074, "learning_rate": 2.3052e-05, "loss": 0.0264, "step": 7687 }, { "epoch": 43.19154929577465, "grad_norm": 0.5398271083831787, "learning_rate": 2.3055e-05, "loss": 0.0284, "step": 7688 }, { "epoch": 43.19718309859155, "grad_norm": 0.5733214616775513, "learning_rate": 2.3058e-05, "loss": 0.026, "step": 7689 }, { "epoch": 43.202816901408454, "grad_norm": 0.3548164367675781, "learning_rate": 2.3061e-05, "loss": 0.006, "step": 7690 }, { "epoch": 43.20845070422535, "grad_norm": 0.6810774207115173, "learning_rate": 2.3064e-05, "loss": 0.0303, "step": 7691 }, { "epoch": 43.21408450704225, "grad_norm": 0.37660372257232666, "learning_rate": 2.3067e-05, "loss": 0.0146, "step": 7692 }, { "epoch": 43.219718309859154, "grad_norm": 0.6594128012657166, "learning_rate": 2.307e-05, "loss": 0.0154, "step": 7693 }, { "epoch": 43.225352112676056, "grad_norm": 0.4684251844882965, "learning_rate": 2.3073e-05, "loss": 0.0107, "step": 7694 }, { "epoch": 43.23098591549296, "grad_norm": 0.27858006954193115, "learning_rate": 2.3076e-05, "loss": 0.0032, "step": 7695 }, { "epoch": 43.23661971830986, "grad_norm": 0.36725303530693054, "learning_rate": 2.3079e-05, "loss": 0.048, "step": 7696 }, { "epoch": 43.24225352112676, "grad_norm": 1.0225775241851807, "learning_rate": 2.3082e-05, "loss": 0.0468, "step": 7697 }, { "epoch": 43.247887323943665, "grad_norm": 0.3145623505115509, "learning_rate": 2.3085e-05, "loss": 0.0043, "step": 7698 }, { 
"epoch": 43.25352112676056, "grad_norm": 0.6813856363296509, "learning_rate": 2.3088e-05, "loss": 0.0682, "step": 7699 }, { "epoch": 43.25915492957746, "grad_norm": 0.601405143737793, "learning_rate": 2.3091000000000003e-05, "loss": 0.0528, "step": 7700 }, { "epoch": 43.264788732394365, "grad_norm": 0.5945401191711426, "learning_rate": 2.3094000000000003e-05, "loss": 0.0793, "step": 7701 }, { "epoch": 43.27042253521127, "grad_norm": 0.5333351492881775, "learning_rate": 2.3097000000000003e-05, "loss": 0.0652, "step": 7702 }, { "epoch": 43.27605633802817, "grad_norm": 0.5221846699714661, "learning_rate": 2.3100000000000002e-05, "loss": 0.0536, "step": 7703 }, { "epoch": 43.28169014084507, "grad_norm": 0.46064916253089905, "learning_rate": 2.3103000000000002e-05, "loss": 0.0449, "step": 7704 }, { "epoch": 43.287323943661974, "grad_norm": 0.46642810106277466, "learning_rate": 2.3106000000000002e-05, "loss": 0.0803, "step": 7705 }, { "epoch": 43.292957746478876, "grad_norm": 0.530270516872406, "learning_rate": 2.3109e-05, "loss": 0.0283, "step": 7706 }, { "epoch": 43.29859154929577, "grad_norm": 0.43694382905960083, "learning_rate": 2.3111999999999998e-05, "loss": 0.0259, "step": 7707 }, { "epoch": 43.304225352112674, "grad_norm": 0.675323486328125, "learning_rate": 2.3114999999999998e-05, "loss": 0.0302, "step": 7708 }, { "epoch": 43.309859154929576, "grad_norm": 0.48985791206359863, "learning_rate": 2.3117999999999998e-05, "loss": 0.031, "step": 7709 }, { "epoch": 43.31549295774648, "grad_norm": 0.4281580448150635, "learning_rate": 2.3121e-05, "loss": 0.0284, "step": 7710 }, { "epoch": 43.32112676056338, "grad_norm": 0.4794338643550873, "learning_rate": 2.3124e-05, "loss": 0.087, "step": 7711 }, { "epoch": 43.32676056338028, "grad_norm": 0.4156155586242676, "learning_rate": 2.3127e-05, "loss": 0.0162, "step": 7712 }, { "epoch": 43.332394366197185, "grad_norm": 0.3643184006214142, "learning_rate": 2.313e-05, "loss": 0.0272, "step": 7713 }, { "epoch": 43.33802816901409, 
"grad_norm": 0.655951738357544, "learning_rate": 2.3133e-05, "loss": 0.0177, "step": 7714 }, { "epoch": 43.34366197183098, "grad_norm": 0.37987813353538513, "learning_rate": 2.3136e-05, "loss": 0.0152, "step": 7715 }, { "epoch": 43.349295774647885, "grad_norm": 0.3153073787689209, "learning_rate": 2.3139e-05, "loss": 0.0271, "step": 7716 }, { "epoch": 43.35492957746479, "grad_norm": 0.3326621949672699, "learning_rate": 2.3142e-05, "loss": 0.0118, "step": 7717 }, { "epoch": 43.36056338028169, "grad_norm": 0.31567972898483276, "learning_rate": 2.3145e-05, "loss": 0.0118, "step": 7718 }, { "epoch": 43.36619718309859, "grad_norm": 0.3382684290409088, "learning_rate": 2.3148e-05, "loss": 0.0118, "step": 7719 }, { "epoch": 43.371830985915494, "grad_norm": 0.45913493633270264, "learning_rate": 2.3151000000000002e-05, "loss": 0.0143, "step": 7720 }, { "epoch": 43.3774647887324, "grad_norm": 0.432340532541275, "learning_rate": 2.3154000000000002e-05, "loss": 0.0402, "step": 7721 }, { "epoch": 43.3830985915493, "grad_norm": 0.30633747577667236, "learning_rate": 2.3157000000000002e-05, "loss": 0.0236, "step": 7722 }, { "epoch": 43.388732394366194, "grad_norm": 0.30705708265304565, "learning_rate": 2.3160000000000002e-05, "loss": 0.0075, "step": 7723 }, { "epoch": 43.394366197183096, "grad_norm": 0.6756173372268677, "learning_rate": 2.3163e-05, "loss": 0.029, "step": 7724 }, { "epoch": 43.4, "grad_norm": 0.3931417167186737, "learning_rate": 2.3166e-05, "loss": 0.0126, "step": 7725 }, { "epoch": 43.4056338028169, "grad_norm": 0.44718456268310547, "learning_rate": 2.3169e-05, "loss": 0.0158, "step": 7726 }, { "epoch": 43.4112676056338, "grad_norm": 0.6671685576438904, "learning_rate": 2.3172e-05, "loss": 0.0086, "step": 7727 }, { "epoch": 43.416901408450705, "grad_norm": 0.3465404808521271, "learning_rate": 2.3175e-05, "loss": 0.0422, "step": 7728 }, { "epoch": 43.42253521126761, "grad_norm": 0.5123358368873596, "learning_rate": 2.3178e-05, "loss": 0.0069, "step": 7729 }, { 
"epoch": 43.42816901408451, "grad_norm": 0.7065209746360779, "learning_rate": 2.3181000000000004e-05, "loss": 0.0149, "step": 7730 }, { "epoch": 43.433802816901405, "grad_norm": 0.26984116435050964, "learning_rate": 2.3184e-05, "loss": 0.0081, "step": 7731 }, { "epoch": 43.43943661971831, "grad_norm": 0.2501259744167328, "learning_rate": 2.3187e-05, "loss": 0.0065, "step": 7732 }, { "epoch": 43.44507042253521, "grad_norm": 0.5094054937362671, "learning_rate": 2.319e-05, "loss": 0.01, "step": 7733 }, { "epoch": 43.45070422535211, "grad_norm": 0.3882339298725128, "learning_rate": 2.3193e-05, "loss": 0.0091, "step": 7734 }, { "epoch": 43.456338028169014, "grad_norm": 0.37475985288619995, "learning_rate": 2.3196e-05, "loss": 0.0291, "step": 7735 }, { "epoch": 43.46197183098592, "grad_norm": 0.7619330286979675, "learning_rate": 2.3199e-05, "loss": 0.0102, "step": 7736 }, { "epoch": 43.46760563380282, "grad_norm": 0.38773101568222046, "learning_rate": 2.3202e-05, "loss": 0.0116, "step": 7737 }, { "epoch": 43.47323943661972, "grad_norm": 0.5937121510505676, "learning_rate": 2.3205e-05, "loss": 0.0303, "step": 7738 }, { "epoch": 43.478873239436616, "grad_norm": 0.3580731451511383, "learning_rate": 2.3208e-05, "loss": 0.0071, "step": 7739 }, { "epoch": 43.48450704225352, "grad_norm": 0.45670589804649353, "learning_rate": 2.3211000000000002e-05, "loss": 0.0217, "step": 7740 }, { "epoch": 43.49014084507042, "grad_norm": 0.8789461851119995, "learning_rate": 2.3214000000000002e-05, "loss": 0.0067, "step": 7741 }, { "epoch": 43.49577464788732, "grad_norm": 0.47310906648635864, "learning_rate": 2.3217e-05, "loss": 0.0096, "step": 7742 }, { "epoch": 43.501408450704226, "grad_norm": 0.9478322863578796, "learning_rate": 2.322e-05, "loss": 0.1092, "step": 7743 }, { "epoch": 43.50704225352113, "grad_norm": 0.554347574710846, "learning_rate": 2.3223e-05, "loss": 0.0586, "step": 7744 }, { "epoch": 43.51267605633803, "grad_norm": 0.5246697664260864, "learning_rate": 2.3226e-05, "loss": 
0.0398, "step": 7745 }, { "epoch": 43.51830985915493, "grad_norm": 0.5951361060142517, "learning_rate": 2.3229e-05, "loss": 0.0592, "step": 7746 }, { "epoch": 43.52394366197183, "grad_norm": 0.5112597346305847, "learning_rate": 2.3232e-05, "loss": 0.087, "step": 7747 }, { "epoch": 43.52957746478873, "grad_norm": 0.5827110409736633, "learning_rate": 2.3235e-05, "loss": 0.089, "step": 7748 }, { "epoch": 43.53521126760563, "grad_norm": 0.5600595474243164, "learning_rate": 2.3238e-05, "loss": 0.0438, "step": 7749 }, { "epoch": 43.540845070422534, "grad_norm": 0.5587488412857056, "learning_rate": 2.3241000000000003e-05, "loss": 0.1232, "step": 7750 }, { "epoch": 43.54647887323944, "grad_norm": 0.5329261422157288, "learning_rate": 2.3244000000000003e-05, "loss": 0.0253, "step": 7751 }, { "epoch": 43.55211267605634, "grad_norm": 0.5664064288139343, "learning_rate": 2.3247000000000003e-05, "loss": 0.0457, "step": 7752 }, { "epoch": 43.55774647887324, "grad_norm": 1.34714937210083, "learning_rate": 2.3250000000000003e-05, "loss": 0.0609, "step": 7753 }, { "epoch": 43.563380281690144, "grad_norm": 0.430747926235199, "learning_rate": 2.3253000000000003e-05, "loss": 0.0264, "step": 7754 }, { "epoch": 43.56901408450704, "grad_norm": 0.40445226430892944, "learning_rate": 2.3256e-05, "loss": 0.0385, "step": 7755 }, { "epoch": 43.57464788732394, "grad_norm": 0.5296322703361511, "learning_rate": 2.3259e-05, "loss": 0.0385, "step": 7756 }, { "epoch": 43.58028169014084, "grad_norm": 0.7982978224754333, "learning_rate": 2.3262e-05, "loss": 0.059, "step": 7757 }, { "epoch": 43.585915492957746, "grad_norm": 0.5534262657165527, "learning_rate": 2.3265e-05, "loss": 0.0213, "step": 7758 }, { "epoch": 43.59154929577465, "grad_norm": 2.143131732940674, "learning_rate": 2.3267999999999998e-05, "loss": 0.0294, "step": 7759 }, { "epoch": 43.59718309859155, "grad_norm": 0.4914100766181946, "learning_rate": 2.3270999999999998e-05, "loss": 0.0265, "step": 7760 }, { "epoch": 43.60281690140845, 
"grad_norm": 0.4201594293117523, "learning_rate": 2.3274e-05, "loss": 0.0243, "step": 7761 }, { "epoch": 43.608450704225355, "grad_norm": 0.41743627190589905, "learning_rate": 2.3277e-05, "loss": 0.0172, "step": 7762 }, { "epoch": 43.61408450704225, "grad_norm": 0.6046962141990662, "learning_rate": 2.328e-05, "loss": 0.0177, "step": 7763 }, { "epoch": 43.61971830985915, "grad_norm": 0.5864201784133911, "learning_rate": 2.3283e-05, "loss": 0.0351, "step": 7764 }, { "epoch": 43.625352112676055, "grad_norm": 0.5053897500038147, "learning_rate": 2.3286e-05, "loss": 0.0311, "step": 7765 }, { "epoch": 43.63098591549296, "grad_norm": 0.6481769680976868, "learning_rate": 2.3289e-05, "loss": 0.013, "step": 7766 }, { "epoch": 43.63661971830986, "grad_norm": 0.49825263023376465, "learning_rate": 2.3292e-05, "loss": 0.0183, "step": 7767 }, { "epoch": 43.64225352112676, "grad_norm": 0.4717060327529907, "learning_rate": 2.3295e-05, "loss": 0.0214, "step": 7768 }, { "epoch": 43.647887323943664, "grad_norm": 0.6142826676368713, "learning_rate": 2.3298e-05, "loss": 0.0234, "step": 7769 }, { "epoch": 43.653521126760566, "grad_norm": 0.3175576329231262, "learning_rate": 2.3301e-05, "loss": 0.0067, "step": 7770 }, { "epoch": 43.65915492957747, "grad_norm": 0.8893284797668457, "learning_rate": 2.3304000000000003e-05, "loss": 0.0194, "step": 7771 }, { "epoch": 43.66478873239436, "grad_norm": 0.4514632821083069, "learning_rate": 2.3307000000000002e-05, "loss": 0.0237, "step": 7772 }, { "epoch": 43.670422535211266, "grad_norm": 0.6023856997489929, "learning_rate": 2.3310000000000002e-05, "loss": 0.0076, "step": 7773 }, { "epoch": 43.67605633802817, "grad_norm": 0.7528659701347351, "learning_rate": 2.3313000000000002e-05, "loss": 0.0304, "step": 7774 }, { "epoch": 43.68169014084507, "grad_norm": 0.4604734480381012, "learning_rate": 2.3316000000000002e-05, "loss": 0.0193, "step": 7775 }, { "epoch": 43.68732394366197, "grad_norm": 0.47784990072250366, "learning_rate": 2.3319e-05, "loss": 
0.0642, "step": 7776 }, { "epoch": 43.692957746478875, "grad_norm": 0.38985463976860046, "learning_rate": 2.3322e-05, "loss": 0.0267, "step": 7777 }, { "epoch": 43.69859154929578, "grad_norm": 0.6520314812660217, "learning_rate": 2.3325e-05, "loss": 0.0116, "step": 7778 }, { "epoch": 43.70422535211267, "grad_norm": 0.6748648881912231, "learning_rate": 2.3328e-05, "loss": 0.0368, "step": 7779 }, { "epoch": 43.709859154929575, "grad_norm": 0.4058550000190735, "learning_rate": 2.3330999999999997e-05, "loss": 0.0066, "step": 7780 }, { "epoch": 43.71549295774648, "grad_norm": 0.7327492833137512, "learning_rate": 2.3334e-05, "loss": 0.0443, "step": 7781 }, { "epoch": 43.72112676056338, "grad_norm": 0.29558253288269043, "learning_rate": 2.3337e-05, "loss": 0.0061, "step": 7782 }, { "epoch": 43.72676056338028, "grad_norm": 0.2643970549106598, "learning_rate": 2.334e-05, "loss": 0.0043, "step": 7783 }, { "epoch": 43.732394366197184, "grad_norm": 0.5506502985954285, "learning_rate": 2.3343e-05, "loss": 0.0125, "step": 7784 }, { "epoch": 43.738028169014086, "grad_norm": 0.4857029318809509, "learning_rate": 2.3346e-05, "loss": 0.0077, "step": 7785 }, { "epoch": 43.74366197183099, "grad_norm": 0.4996969997882843, "learning_rate": 2.3349e-05, "loss": 0.0156, "step": 7786 }, { "epoch": 43.74929577464789, "grad_norm": 0.7265658378601074, "learning_rate": 2.3352e-05, "loss": 0.1147, "step": 7787 }, { "epoch": 43.754929577464786, "grad_norm": 0.728344202041626, "learning_rate": 2.3355e-05, "loss": 0.0611, "step": 7788 }, { "epoch": 43.76056338028169, "grad_norm": 0.5071502327919006, "learning_rate": 2.3358e-05, "loss": 0.0546, "step": 7789 }, { "epoch": 43.76619718309859, "grad_norm": 0.4709807336330414, "learning_rate": 2.3361e-05, "loss": 0.0747, "step": 7790 }, { "epoch": 43.77183098591549, "grad_norm": 0.4566911458969116, "learning_rate": 2.3364000000000002e-05, "loss": 0.0505, "step": 7791 }, { "epoch": 43.777464788732395, "grad_norm": 0.4724501967430115, "learning_rate": 
2.3367000000000002e-05, "loss": 0.0406, "step": 7792 }, { "epoch": 43.7830985915493, "grad_norm": 0.4147586226463318, "learning_rate": 2.337e-05, "loss": 0.0321, "step": 7793 }, { "epoch": 43.7887323943662, "grad_norm": 0.36426207423210144, "learning_rate": 2.3373e-05, "loss": 0.0353, "step": 7794 }, { "epoch": 43.7943661971831, "grad_norm": 0.5589717626571655, "learning_rate": 2.3376e-05, "loss": 0.0516, "step": 7795 }, { "epoch": 43.8, "grad_norm": 0.5437612533569336, "learning_rate": 2.3379e-05, "loss": 0.0458, "step": 7796 }, { "epoch": 43.8056338028169, "grad_norm": 0.48390910029411316, "learning_rate": 2.3382e-05, "loss": 0.0458, "step": 7797 }, { "epoch": 43.8112676056338, "grad_norm": 0.4563766121864319, "learning_rate": 2.3385e-05, "loss": 0.0224, "step": 7798 }, { "epoch": 43.816901408450704, "grad_norm": 0.580437183380127, "learning_rate": 2.3388e-05, "loss": 0.0273, "step": 7799 }, { "epoch": 43.822535211267606, "grad_norm": 0.49449440836906433, "learning_rate": 2.3391e-05, "loss": 0.0753, "step": 7800 }, { "epoch": 43.82816901408451, "grad_norm": 0.6087822318077087, "learning_rate": 2.3394000000000003e-05, "loss": 0.0388, "step": 7801 }, { "epoch": 43.83380281690141, "grad_norm": 0.5396686792373657, "learning_rate": 2.3397000000000003e-05, "loss": 0.0439, "step": 7802 }, { "epoch": 43.83943661971831, "grad_norm": 0.5637728571891785, "learning_rate": 2.3400000000000003e-05, "loss": 0.0389, "step": 7803 }, { "epoch": 43.84507042253521, "grad_norm": 0.4135057032108307, "learning_rate": 2.3403e-05, "loss": 0.0289, "step": 7804 }, { "epoch": 43.85070422535211, "grad_norm": 0.4761960506439209, "learning_rate": 2.3406e-05, "loss": 0.0253, "step": 7805 }, { "epoch": 43.85633802816901, "grad_norm": 0.4166015684604645, "learning_rate": 2.3409e-05, "loss": 0.0178, "step": 7806 }, { "epoch": 43.861971830985915, "grad_norm": 0.5477655529975891, "learning_rate": 2.3412e-05, "loss": 0.0268, "step": 7807 }, { "epoch": 43.86760563380282, "grad_norm": 
0.6108776330947876, "learning_rate": 2.3415e-05, "loss": 0.0193, "step": 7808 }, { "epoch": 43.87323943661972, "grad_norm": 1.124834418296814, "learning_rate": 2.3418e-05, "loss": 0.0232, "step": 7809 }, { "epoch": 43.87887323943662, "grad_norm": 0.4576795697212219, "learning_rate": 2.3420999999999998e-05, "loss": 0.0142, "step": 7810 }, { "epoch": 43.884507042253524, "grad_norm": 0.3883802890777588, "learning_rate": 2.3424e-05, "loss": 0.0167, "step": 7811 }, { "epoch": 43.89014084507042, "grad_norm": 0.35006600618362427, "learning_rate": 2.3427e-05, "loss": 0.0094, "step": 7812 }, { "epoch": 43.89577464788732, "grad_norm": 0.7222400903701782, "learning_rate": 2.343e-05, "loss": 0.0143, "step": 7813 }, { "epoch": 43.901408450704224, "grad_norm": 0.305726557970047, "learning_rate": 2.3433e-05, "loss": 0.0099, "step": 7814 }, { "epoch": 43.907042253521126, "grad_norm": 0.48615020513534546, "learning_rate": 2.3436e-05, "loss": 0.0077, "step": 7815 }, { "epoch": 43.91267605633803, "grad_norm": 0.5442679524421692, "learning_rate": 2.3439e-05, "loss": 0.0221, "step": 7816 }, { "epoch": 43.91830985915493, "grad_norm": 0.39150452613830566, "learning_rate": 2.3442e-05, "loss": 0.0078, "step": 7817 }, { "epoch": 43.92394366197183, "grad_norm": 0.24363622069358826, "learning_rate": 2.3445e-05, "loss": 0.0055, "step": 7818 }, { "epoch": 43.929577464788736, "grad_norm": 0.7385860681533813, "learning_rate": 2.3448e-05, "loss": 0.1085, "step": 7819 }, { "epoch": 43.93521126760563, "grad_norm": 0.3570079803466797, "learning_rate": 2.3451e-05, "loss": 0.0121, "step": 7820 }, { "epoch": 43.94084507042253, "grad_norm": 0.47008687257766724, "learning_rate": 2.3454000000000003e-05, "loss": 0.0229, "step": 7821 }, { "epoch": 43.946478873239435, "grad_norm": 0.37935754656791687, "learning_rate": 2.3457000000000003e-05, "loss": 0.0098, "step": 7822 }, { "epoch": 43.95211267605634, "grad_norm": 0.40146660804748535, "learning_rate": 2.3460000000000002e-05, "loss": 0.0135, "step": 7823 }, { 
"epoch": 43.95774647887324, "grad_norm": 0.30721864104270935, "learning_rate": 2.3463000000000002e-05, "loss": 0.0163, "step": 7824 }, { "epoch": 43.96338028169014, "grad_norm": 0.2836132049560547, "learning_rate": 2.3466000000000002e-05, "loss": 0.0126, "step": 7825 }, { "epoch": 43.969014084507045, "grad_norm": 0.5511935949325562, "learning_rate": 2.3469000000000002e-05, "loss": 0.0208, "step": 7826 }, { "epoch": 43.97464788732395, "grad_norm": 0.48826706409454346, "learning_rate": 2.3472e-05, "loss": 0.0198, "step": 7827 }, { "epoch": 43.98028169014084, "grad_norm": 0.39377129077911377, "learning_rate": 2.3475e-05, "loss": 0.0211, "step": 7828 }, { "epoch": 43.985915492957744, "grad_norm": 0.4754088222980499, "learning_rate": 2.3477999999999998e-05, "loss": 0.0188, "step": 7829 }, { "epoch": 43.99154929577465, "grad_norm": 0.8978520035743713, "learning_rate": 2.3480999999999998e-05, "loss": 0.0099, "step": 7830 }, { "epoch": 43.99718309859155, "grad_norm": 0.4303825795650482, "learning_rate": 2.3484e-05, "loss": 0.0364, "step": 7831 }, { "epoch": 44.0, "grad_norm": 0.32251137495040894, "learning_rate": 2.3487e-05, "loss": 0.005, "step": 7832 }, { "epoch": 44.0056338028169, "grad_norm": 0.6007819771766663, "learning_rate": 2.349e-05, "loss": 0.1089, "step": 7833 }, { "epoch": 44.011267605633805, "grad_norm": 0.38771694898605347, "learning_rate": 2.3493e-05, "loss": 0.0393, "step": 7834 }, { "epoch": 44.01690140845071, "grad_norm": 0.4451919496059418, "learning_rate": 2.3496e-05, "loss": 0.0381, "step": 7835 }, { "epoch": 44.02253521126761, "grad_norm": 0.5449560880661011, "learning_rate": 2.3499e-05, "loss": 0.0566, "step": 7836 }, { "epoch": 44.028169014084504, "grad_norm": 0.5299612283706665, "learning_rate": 2.3502e-05, "loss": 0.0294, "step": 7837 }, { "epoch": 44.03380281690141, "grad_norm": 0.42757725715637207, "learning_rate": 2.3505e-05, "loss": 0.033, "step": 7838 }, { "epoch": 44.03943661971831, "grad_norm": 0.4235852062702179, "learning_rate": 
2.3508e-05, "loss": 0.0815, "step": 7839 }, { "epoch": 44.04507042253521, "grad_norm": 0.4618293046951294, "learning_rate": 2.3511e-05, "loss": 0.0248, "step": 7840 }, { "epoch": 44.05070422535211, "grad_norm": 0.3545951247215271, "learning_rate": 2.3514000000000002e-05, "loss": 0.027, "step": 7841 }, { "epoch": 44.056338028169016, "grad_norm": 0.6509636044502258, "learning_rate": 2.3517000000000002e-05, "loss": 0.0277, "step": 7842 }, { "epoch": 44.06197183098592, "grad_norm": 0.7352015972137451, "learning_rate": 2.3520000000000002e-05, "loss": 0.0413, "step": 7843 }, { "epoch": 44.06760563380282, "grad_norm": 0.5637580752372742, "learning_rate": 2.3523e-05, "loss": 0.0267, "step": 7844 }, { "epoch": 44.073239436619716, "grad_norm": 0.5546891093254089, "learning_rate": 2.3526e-05, "loss": 0.0351, "step": 7845 }, { "epoch": 44.07887323943662, "grad_norm": 0.5898036956787109, "learning_rate": 2.3529e-05, "loss": 0.0689, "step": 7846 }, { "epoch": 44.08450704225352, "grad_norm": 0.6557368636131287, "learning_rate": 2.3532e-05, "loss": 0.0411, "step": 7847 }, { "epoch": 44.09014084507042, "grad_norm": 0.40951815247535706, "learning_rate": 2.3535e-05, "loss": 0.0193, "step": 7848 }, { "epoch": 44.095774647887325, "grad_norm": 0.24424578249454498, "learning_rate": 2.3538e-05, "loss": 0.0088, "step": 7849 }, { "epoch": 44.10140845070423, "grad_norm": 0.5525507926940918, "learning_rate": 2.3541e-05, "loss": 0.0463, "step": 7850 }, { "epoch": 44.10704225352113, "grad_norm": 0.42989179491996765, "learning_rate": 2.3544000000000004e-05, "loss": 0.0157, "step": 7851 }, { "epoch": 44.11267605633803, "grad_norm": 0.5200693607330322, "learning_rate": 2.3547000000000003e-05, "loss": 0.0123, "step": 7852 }, { "epoch": 44.11830985915493, "grad_norm": 0.6253804564476013, "learning_rate": 2.3550000000000003e-05, "loss": 0.0599, "step": 7853 }, { "epoch": 44.12394366197183, "grad_norm": 0.5785319805145264, "learning_rate": 2.3553e-05, "loss": 0.0206, "step": 7854 }, { "epoch": 
44.12957746478873, "grad_norm": 0.5267059803009033, "learning_rate": 2.3556e-05, "loss": 0.0281, "step": 7855 }, { "epoch": 44.135211267605634, "grad_norm": 0.33644533157348633, "learning_rate": 2.3559e-05, "loss": 0.007, "step": 7856 }, { "epoch": 44.140845070422536, "grad_norm": 0.4839067757129669, "learning_rate": 2.3562e-05, "loss": 0.0331, "step": 7857 }, { "epoch": 44.14647887323944, "grad_norm": 0.7550386190414429, "learning_rate": 2.3565e-05, "loss": 0.0193, "step": 7858 }, { "epoch": 44.15211267605634, "grad_norm": 0.6594158411026001, "learning_rate": 2.3568e-05, "loss": 0.0169, "step": 7859 }, { "epoch": 44.15774647887324, "grad_norm": 0.25154781341552734, "learning_rate": 2.3571e-05, "loss": 0.0094, "step": 7860 }, { "epoch": 44.16338028169014, "grad_norm": 0.3388271629810333, "learning_rate": 2.3574e-05, "loss": 0.009, "step": 7861 }, { "epoch": 44.16901408450704, "grad_norm": 0.7663984894752502, "learning_rate": 2.3577e-05, "loss": 0.0194, "step": 7862 }, { "epoch": 44.17464788732394, "grad_norm": 0.572506844997406, "learning_rate": 2.358e-05, "loss": 0.0124, "step": 7863 }, { "epoch": 44.180281690140845, "grad_norm": 0.2704383134841919, "learning_rate": 2.3583e-05, "loss": 0.0089, "step": 7864 }, { "epoch": 44.18591549295775, "grad_norm": 0.39870619773864746, "learning_rate": 2.3586e-05, "loss": 0.0525, "step": 7865 }, { "epoch": 44.19154929577465, "grad_norm": 0.434388130903244, "learning_rate": 2.3589e-05, "loss": 0.0152, "step": 7866 }, { "epoch": 44.19718309859155, "grad_norm": 0.7184556126594543, "learning_rate": 2.3592e-05, "loss": 0.0313, "step": 7867 }, { "epoch": 44.202816901408454, "grad_norm": 0.2690013647079468, "learning_rate": 2.3595e-05, "loss": 0.0088, "step": 7868 }, { "epoch": 44.20845070422535, "grad_norm": 0.8232795000076294, "learning_rate": 2.3598e-05, "loss": 0.0502, "step": 7869 }, { "epoch": 44.21408450704225, "grad_norm": 0.28519633412361145, "learning_rate": 2.3601e-05, "loss": 0.0046, "step": 7870 }, { "epoch": 
44.219718309859154, "grad_norm": 0.43358469009399414, "learning_rate": 2.3604000000000003e-05, "loss": 0.0303, "step": 7871 }, { "epoch": 44.225352112676056, "grad_norm": 0.4407137632369995, "learning_rate": 2.3607000000000003e-05, "loss": 0.0128, "step": 7872 }, { "epoch": 44.23098591549296, "grad_norm": 0.3090610206127167, "learning_rate": 2.3610000000000003e-05, "loss": 0.0052, "step": 7873 }, { "epoch": 44.23661971830986, "grad_norm": 0.46105068922042847, "learning_rate": 2.3613000000000002e-05, "loss": 0.0149, "step": 7874 }, { "epoch": 44.24225352112676, "grad_norm": 0.6232210993766785, "learning_rate": 2.3616000000000002e-05, "loss": 0.0431, "step": 7875 }, { "epoch": 44.247887323943665, "grad_norm": 0.26909035444259644, "learning_rate": 2.3619000000000002e-05, "loss": 0.0044, "step": 7876 }, { "epoch": 44.25352112676056, "grad_norm": 1.041377305984497, "learning_rate": 2.3622000000000002e-05, "loss": 0.1387, "step": 7877 }, { "epoch": 44.25915492957746, "grad_norm": 0.5842006802558899, "learning_rate": 2.3624999999999998e-05, "loss": 0.043, "step": 7878 }, { "epoch": 44.264788732394365, "grad_norm": 0.7135234475135803, "learning_rate": 2.3627999999999998e-05, "loss": 0.0725, "step": 7879 }, { "epoch": 44.27042253521127, "grad_norm": 0.609305202960968, "learning_rate": 2.3630999999999998e-05, "loss": 0.0689, "step": 7880 }, { "epoch": 44.27605633802817, "grad_norm": 0.7104489803314209, "learning_rate": 2.3633999999999998e-05, "loss": 0.0654, "step": 7881 }, { "epoch": 44.28169014084507, "grad_norm": 0.5459648966789246, "learning_rate": 2.3637e-05, "loss": 0.0697, "step": 7882 }, { "epoch": 44.287323943661974, "grad_norm": 0.5418562889099121, "learning_rate": 2.364e-05, "loss": 0.0431, "step": 7883 }, { "epoch": 44.292957746478876, "grad_norm": 0.4977206289768219, "learning_rate": 2.3643e-05, "loss": 0.0485, "step": 7884 }, { "epoch": 44.29859154929577, "grad_norm": 0.44140222668647766, "learning_rate": 2.3646e-05, "loss": 0.0281, "step": 7885 }, { "epoch": 
44.304225352112674, "grad_norm": 0.48337307572364807, "learning_rate": 2.3649e-05, "loss": 0.0496, "step": 7886 }, { "epoch": 44.309859154929576, "grad_norm": 0.6490774750709534, "learning_rate": 2.3652e-05, "loss": 0.0343, "step": 7887 }, { "epoch": 44.31549295774648, "grad_norm": 0.46146270632743835, "learning_rate": 2.3655e-05, "loss": 0.0258, "step": 7888 }, { "epoch": 44.32112676056338, "grad_norm": 0.4103142321109772, "learning_rate": 2.3658e-05, "loss": 0.032, "step": 7889 }, { "epoch": 44.32676056338028, "grad_norm": 0.581641435623169, "learning_rate": 2.3661e-05, "loss": 0.0302, "step": 7890 }, { "epoch": 44.332394366197185, "grad_norm": 0.49004504084587097, "learning_rate": 2.3664e-05, "loss": 0.0324, "step": 7891 }, { "epoch": 44.33802816901409, "grad_norm": 0.454850435256958, "learning_rate": 2.3667000000000002e-05, "loss": 0.0458, "step": 7892 }, { "epoch": 44.34366197183098, "grad_norm": 0.4524531364440918, "learning_rate": 2.3670000000000002e-05, "loss": 0.02, "step": 7893 }, { "epoch": 44.349295774647885, "grad_norm": 0.33930903673171997, "learning_rate": 2.3673000000000002e-05, "loss": 0.013, "step": 7894 }, { "epoch": 44.35492957746479, "grad_norm": 0.4395025968551636, "learning_rate": 2.3676e-05, "loss": 0.0135, "step": 7895 }, { "epoch": 44.36056338028169, "grad_norm": 0.3442915081977844, "learning_rate": 2.3679e-05, "loss": 0.0133, "step": 7896 }, { "epoch": 44.36619718309859, "grad_norm": 1.4321715831756592, "learning_rate": 2.3682e-05, "loss": 0.0159, "step": 7897 }, { "epoch": 44.371830985915494, "grad_norm": 0.5796653032302856, "learning_rate": 2.3685e-05, "loss": 0.0226, "step": 7898 }, { "epoch": 44.3774647887324, "grad_norm": 0.39902836084365845, "learning_rate": 2.3688e-05, "loss": 0.0125, "step": 7899 }, { "epoch": 44.3830985915493, "grad_norm": 0.3640649914741516, "learning_rate": 2.3691e-05, "loss": 0.0324, "step": 7900 }, { "epoch": 44.388732394366194, "grad_norm": 0.4194592237472534, "learning_rate": 2.3694e-05, "loss": 0.0126, 
"step": 7901 }, { "epoch": 44.394366197183096, "grad_norm": 0.5491151809692383, "learning_rate": 2.3697000000000004e-05, "loss": 0.0151, "step": 7902 }, { "epoch": 44.4, "grad_norm": 0.24446123838424683, "learning_rate": 2.37e-05, "loss": 0.0096, "step": 7903 }, { "epoch": 44.4056338028169, "grad_norm": 0.37560978531837463, "learning_rate": 2.3703e-05, "loss": 0.0129, "step": 7904 }, { "epoch": 44.4112676056338, "grad_norm": 0.20000191032886505, "learning_rate": 2.3706e-05, "loss": 0.006, "step": 7905 }, { "epoch": 44.416901408450705, "grad_norm": 0.32730725407600403, "learning_rate": 2.3709e-05, "loss": 0.0124, "step": 7906 }, { "epoch": 44.42253521126761, "grad_norm": 0.22342467308044434, "learning_rate": 2.3712e-05, "loss": 0.0063, "step": 7907 }, { "epoch": 44.42816901408451, "grad_norm": 0.3643922209739685, "learning_rate": 2.3715e-05, "loss": 0.0067, "step": 7908 }, { "epoch": 44.433802816901405, "grad_norm": 0.4679563343524933, "learning_rate": 2.3718e-05, "loss": 0.038, "step": 7909 }, { "epoch": 44.43943661971831, "grad_norm": 0.5846088528633118, "learning_rate": 2.3721e-05, "loss": 0.0088, "step": 7910 }, { "epoch": 44.44507042253521, "grad_norm": 0.2797967493534088, "learning_rate": 2.3724e-05, "loss": 0.0064, "step": 7911 }, { "epoch": 44.45070422535211, "grad_norm": 0.5628907680511475, "learning_rate": 2.3727000000000002e-05, "loss": 0.0106, "step": 7912 }, { "epoch": 44.456338028169014, "grad_norm": 0.9041396379470825, "learning_rate": 2.373e-05, "loss": 0.0049, "step": 7913 }, { "epoch": 44.46197183098592, "grad_norm": 0.3326779901981354, "learning_rate": 2.3733e-05, "loss": 0.0132, "step": 7914 }, { "epoch": 44.46760563380282, "grad_norm": 0.3412613272666931, "learning_rate": 2.3736e-05, "loss": 0.009, "step": 7915 }, { "epoch": 44.47323943661972, "grad_norm": 1.1166714429855347, "learning_rate": 2.3739e-05, "loss": 0.0241, "step": 7916 }, { "epoch": 44.478873239436616, "grad_norm": 0.27376049757003784, "learning_rate": 2.3742e-05, "loss": 0.0046, 
"step": 7917 }, { "epoch": 44.48450704225352, "grad_norm": 0.2324826717376709, "learning_rate": 2.3745e-05, "loss": 0.0157, "step": 7918 }, { "epoch": 44.49014084507042, "grad_norm": 0.9922760128974915, "learning_rate": 2.3748e-05, "loss": 0.0139, "step": 7919 }, { "epoch": 44.49577464788732, "grad_norm": 0.6253330111503601, "learning_rate": 2.3751e-05, "loss": 0.0169, "step": 7920 }, { "epoch": 44.501408450704226, "grad_norm": 0.9453456401824951, "learning_rate": 2.3754e-05, "loss": 0.0637, "step": 7921 }, { "epoch": 44.50704225352113, "grad_norm": 0.6878356337547302, "learning_rate": 2.3757000000000003e-05, "loss": 0.0794, "step": 7922 }, { "epoch": 44.51267605633803, "grad_norm": 0.5235702395439148, "learning_rate": 2.3760000000000003e-05, "loss": 0.0468, "step": 7923 }, { "epoch": 44.51830985915493, "grad_norm": 0.6037988662719727, "learning_rate": 2.3763000000000003e-05, "loss": 0.0785, "step": 7924 }, { "epoch": 44.52394366197183, "grad_norm": 0.44940826296806335, "learning_rate": 2.3766000000000003e-05, "loss": 0.0389, "step": 7925 }, { "epoch": 44.52957746478873, "grad_norm": 0.42800456285476685, "learning_rate": 2.3769000000000002e-05, "loss": 0.0511, "step": 7926 }, { "epoch": 44.53521126760563, "grad_norm": 0.34328916668891907, "learning_rate": 2.3772e-05, "loss": 0.0345, "step": 7927 }, { "epoch": 44.540845070422534, "grad_norm": 0.5195671319961548, "learning_rate": 2.3775e-05, "loss": 0.0864, "step": 7928 }, { "epoch": 44.54647887323944, "grad_norm": 0.8230679631233215, "learning_rate": 2.3778e-05, "loss": 0.1351, "step": 7929 }, { "epoch": 44.55211267605634, "grad_norm": 0.6358204483985901, "learning_rate": 2.3780999999999998e-05, "loss": 0.0248, "step": 7930 }, { "epoch": 44.55774647887324, "grad_norm": 0.4715672731399536, "learning_rate": 2.3783999999999998e-05, "loss": 0.0434, "step": 7931 }, { "epoch": 44.563380281690144, "grad_norm": 0.4513266384601593, "learning_rate": 2.3787e-05, "loss": 0.0393, "step": 7932 }, { "epoch": 44.56901408450704, 
"grad_norm": 0.3832801282405853, "learning_rate": 2.379e-05, "loss": 0.0213, "step": 7933 }, { "epoch": 44.57464788732394, "grad_norm": 0.4719560444355011, "learning_rate": 2.3793e-05, "loss": 0.0174, "step": 7934 }, { "epoch": 44.58028169014084, "grad_norm": 0.41757240891456604, "learning_rate": 2.3796e-05, "loss": 0.0149, "step": 7935 }, { "epoch": 44.585915492957746, "grad_norm": 0.3996347486972809, "learning_rate": 2.3799e-05, "loss": 0.0384, "step": 7936 }, { "epoch": 44.59154929577465, "grad_norm": 0.5936568379402161, "learning_rate": 2.3802e-05, "loss": 0.0326, "step": 7937 }, { "epoch": 44.59718309859155, "grad_norm": 0.5568337440490723, "learning_rate": 2.3805e-05, "loss": 0.0255, "step": 7938 }, { "epoch": 44.60281690140845, "grad_norm": 0.29552990198135376, "learning_rate": 2.3808e-05, "loss": 0.0164, "step": 7939 }, { "epoch": 44.608450704225355, "grad_norm": 0.38335803151130676, "learning_rate": 2.3811e-05, "loss": 0.0202, "step": 7940 }, { "epoch": 44.61408450704225, "grad_norm": 1.21063232421875, "learning_rate": 2.3814e-05, "loss": 0.0166, "step": 7941 }, { "epoch": 44.61971830985915, "grad_norm": 0.616303026676178, "learning_rate": 2.3817000000000003e-05, "loss": 0.0177, "step": 7942 }, { "epoch": 44.625352112676055, "grad_norm": 0.43892326951026917, "learning_rate": 2.3820000000000002e-05, "loss": 0.0475, "step": 7943 }, { "epoch": 44.63098591549296, "grad_norm": 0.2774001955986023, "learning_rate": 2.3823000000000002e-05, "loss": 0.008, "step": 7944 }, { "epoch": 44.63661971830986, "grad_norm": 0.4854055345058441, "learning_rate": 2.3826000000000002e-05, "loss": 0.0338, "step": 7945 }, { "epoch": 44.64225352112676, "grad_norm": 0.5037940144538879, "learning_rate": 2.3829000000000002e-05, "loss": 0.0161, "step": 7946 }, { "epoch": 44.647887323943664, "grad_norm": 0.61765056848526, "learning_rate": 2.3832e-05, "loss": 0.0188, "step": 7947 }, { "epoch": 44.653521126760566, "grad_norm": 0.3508298993110657, "learning_rate": 2.3835e-05, "loss": 0.0108, 
"step": 7948 }, { "epoch": 44.65915492957747, "grad_norm": 0.46717000007629395, "learning_rate": 2.3838e-05, "loss": 0.0119, "step": 7949 }, { "epoch": 44.66478873239436, "grad_norm": 0.5621916651725769, "learning_rate": 2.3841e-05, "loss": 0.0359, "step": 7950 }, { "epoch": 44.670422535211266, "grad_norm": 0.7262651324272156, "learning_rate": 2.3844e-05, "loss": 0.0104, "step": 7951 }, { "epoch": 44.67605633802817, "grad_norm": 0.7005552649497986, "learning_rate": 2.3847e-05, "loss": 0.0114, "step": 7952 }, { "epoch": 44.68169014084507, "grad_norm": 0.899017333984375, "learning_rate": 2.385e-05, "loss": 0.0409, "step": 7953 }, { "epoch": 44.68732394366197, "grad_norm": 0.6299459934234619, "learning_rate": 2.3853e-05, "loss": 0.0812, "step": 7954 }, { "epoch": 44.692957746478875, "grad_norm": 0.79371577501297, "learning_rate": 2.3856e-05, "loss": 0.0076, "step": 7955 }, { "epoch": 44.69859154929578, "grad_norm": 0.33741772174835205, "learning_rate": 2.3859e-05, "loss": 0.0061, "step": 7956 }, { "epoch": 44.70422535211267, "grad_norm": 0.2572935223579407, "learning_rate": 2.3862e-05, "loss": 0.0051, "step": 7957 }, { "epoch": 44.709859154929575, "grad_norm": 1.0033448934555054, "learning_rate": 2.3865e-05, "loss": 0.01, "step": 7958 }, { "epoch": 44.71549295774648, "grad_norm": 0.6115378141403198, "learning_rate": 2.3868e-05, "loss": 0.0345, "step": 7959 }, { "epoch": 44.72112676056338, "grad_norm": 0.7211076617240906, "learning_rate": 2.3871e-05, "loss": 0.0396, "step": 7960 }, { "epoch": 44.72676056338028, "grad_norm": 0.4242430031299591, "learning_rate": 2.3874e-05, "loss": 0.0121, "step": 7961 }, { "epoch": 44.732394366197184, "grad_norm": 0.9823033809661865, "learning_rate": 2.3877000000000002e-05, "loss": 0.0572, "step": 7962 }, { "epoch": 44.738028169014086, "grad_norm": 0.548136830329895, "learning_rate": 2.3880000000000002e-05, "loss": 0.0067, "step": 7963 }, { "epoch": 44.74366197183099, "grad_norm": 0.7422044277191162, "learning_rate": 2.3883e-05, "loss": 
0.0269, "step": 7964 }, { "epoch": 44.74929577464789, "grad_norm": 1.027029037475586, "learning_rate": 2.3886e-05, "loss": 0.1711, "step": 7965 }, { "epoch": 44.754929577464786, "grad_norm": 0.6554110646247864, "learning_rate": 2.3889e-05, "loss": 0.0634, "step": 7966 }, { "epoch": 44.76056338028169, "grad_norm": 0.6957653164863586, "learning_rate": 2.3892e-05, "loss": 0.0526, "step": 7967 }, { "epoch": 44.76619718309859, "grad_norm": 0.6246751546859741, "learning_rate": 2.3895e-05, "loss": 0.0821, "step": 7968 }, { "epoch": 44.77183098591549, "grad_norm": 0.7641055583953857, "learning_rate": 2.3898e-05, "loss": 0.1015, "step": 7969 }, { "epoch": 44.777464788732395, "grad_norm": 0.4923984110355377, "learning_rate": 2.3901e-05, "loss": 0.0427, "step": 7970 }, { "epoch": 44.7830985915493, "grad_norm": 1.0806453227996826, "learning_rate": 2.3904e-05, "loss": 0.0705, "step": 7971 }, { "epoch": 44.7887323943662, "grad_norm": 0.6399094462394714, "learning_rate": 2.3907000000000003e-05, "loss": 0.0373, "step": 7972 }, { "epoch": 44.7943661971831, "grad_norm": 0.5332564115524292, "learning_rate": 2.3910000000000003e-05, "loss": 0.0389, "step": 7973 }, { "epoch": 44.8, "grad_norm": 0.5953487157821655, "learning_rate": 2.3913000000000003e-05, "loss": 0.0321, "step": 7974 }, { "epoch": 44.8056338028169, "grad_norm": 0.6122700572013855, "learning_rate": 2.3916000000000003e-05, "loss": 0.0471, "step": 7975 }, { "epoch": 44.8112676056338, "grad_norm": 1.3555333614349365, "learning_rate": 2.3919e-05, "loss": 0.0271, "step": 7976 }, { "epoch": 44.816901408450704, "grad_norm": 0.43390214443206787, "learning_rate": 2.3922e-05, "loss": 0.0296, "step": 7977 }, { "epoch": 44.822535211267606, "grad_norm": 0.4437839388847351, "learning_rate": 2.3925e-05, "loss": 0.1068, "step": 7978 }, { "epoch": 44.82816901408451, "grad_norm": 0.3576606810092926, "learning_rate": 2.3928e-05, "loss": 0.0192, "step": 7979 }, { "epoch": 44.83380281690141, "grad_norm": 0.552492082118988, "learning_rate": 
2.3931e-05, "loss": 0.0387, "step": 7980 }, { "epoch": 44.83943661971831, "grad_norm": 0.47351229190826416, "learning_rate": 2.3933999999999998e-05, "loss": 0.0306, "step": 7981 }, { "epoch": 44.84507042253521, "grad_norm": 0.4636901021003723, "learning_rate": 2.3937e-05, "loss": 0.0341, "step": 7982 }, { "epoch": 44.85070422535211, "grad_norm": 0.5027878880500793, "learning_rate": 2.394e-05, "loss": 0.0188, "step": 7983 }, { "epoch": 44.85633802816901, "grad_norm": 0.5184895396232605, "learning_rate": 2.3943e-05, "loss": 0.026, "step": 7984 }, { "epoch": 44.861971830985915, "grad_norm": 0.5838266015052795, "learning_rate": 2.3946e-05, "loss": 0.0323, "step": 7985 }, { "epoch": 44.86760563380282, "grad_norm": 0.6231322288513184, "learning_rate": 2.3949e-05, "loss": 0.0304, "step": 7986 }, { "epoch": 44.87323943661972, "grad_norm": 4.252090930938721, "learning_rate": 2.3952e-05, "loss": 0.0315, "step": 7987 }, { "epoch": 44.87887323943662, "grad_norm": 0.6591243743896484, "learning_rate": 2.3955e-05, "loss": 0.0235, "step": 7988 }, { "epoch": 44.884507042253524, "grad_norm": 0.4051489233970642, "learning_rate": 2.3958e-05, "loss": 0.0234, "step": 7989 }, { "epoch": 44.89014084507042, "grad_norm": 0.997909426689148, "learning_rate": 2.3961e-05, "loss": 0.0314, "step": 7990 }, { "epoch": 44.89577464788732, "grad_norm": 0.6585088968276978, "learning_rate": 2.3964e-05, "loss": 0.0476, "step": 7991 }, { "epoch": 44.901408450704224, "grad_norm": 0.6483208537101746, "learning_rate": 2.3967000000000003e-05, "loss": 0.0071, "step": 7992 }, { "epoch": 44.907042253521126, "grad_norm": 0.4106220304965973, "learning_rate": 2.3970000000000003e-05, "loss": 0.0088, "step": 7993 }, { "epoch": 44.91267605633803, "grad_norm": 0.6806868314743042, "learning_rate": 2.3973000000000002e-05, "loss": 0.0416, "step": 7994 }, { "epoch": 44.91830985915493, "grad_norm": 0.5375381708145142, "learning_rate": 2.3976000000000002e-05, "loss": 0.032, "step": 7995 }, { "epoch": 44.92394366197183, 
"grad_norm": 0.3641299903392792, "learning_rate": 2.3979000000000002e-05, "loss": 0.0174, "step": 7996 }, { "epoch": 44.929577464788736, "grad_norm": 0.5465400218963623, "learning_rate": 2.3982000000000002e-05, "loss": 0.0372, "step": 7997 }, { "epoch": 44.93521126760563, "grad_norm": 0.49799075722694397, "learning_rate": 2.3985e-05, "loss": 0.0225, "step": 7998 }, { "epoch": 44.94084507042253, "grad_norm": 0.4281368851661682, "learning_rate": 2.3988e-05, "loss": 0.018, "step": 7999 }, { "epoch": 44.946478873239435, "grad_norm": 0.6678014397621155, "learning_rate": 2.3991e-05, "loss": 0.0226, "step": 8000 }, { "epoch": 44.946478873239435, "eval_cer": 0.1098841278481997, "eval_loss": 0.4477415084838867, "eval_runtime": 15.9259, "eval_samples_per_second": 19.088, "eval_steps_per_second": 0.628, "eval_wer": 0.39217191097467385, "step": 8000 } ], "logging_steps": 1.0, "max_steps": 100000, "num_input_tokens_seen": 0, "num_train_epochs": 565, "save_steps": 1000, "stateful_callbacks": { "EarlyStoppingCallback": { "args": { "early_stopping_patience": 5, "early_stopping_threshold": 0.0 }, "attributes": { "early_stopping_patience_counter": 4 } }, "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": false }, "attributes": {} } }, "total_flos": 1.6348340988395975e+20, "train_batch_size": 32, "trial_name": null, "trial_params": null }