{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9989801121876594,
  "eval_steps": 245,
  "global_step": 980,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002039775624681285,
      "grad_norm": 0.08994754403829575,
      "learning_rate": 2e-05,
      "loss": 2.6733,
      "step": 1
    },
    {
      "epoch": 0.002039775624681285,
      "eval_loss": 2.6805498600006104,
      "eval_runtime": 48.2372,
      "eval_samples_per_second": 17.124,
      "eval_steps_per_second": 2.156,
      "step": 1
    },
    {
      "epoch": 0.00407955124936257,
      "grad_norm": 0.09641231596469879,
      "learning_rate": 4e-05,
      "loss": 2.7875,
      "step": 2
    },
    {
      "epoch": 0.006119326874043855,
      "grad_norm": 0.10044872015714645,
      "learning_rate": 6e-05,
      "loss": 2.6516,
      "step": 3
    },
    {
      "epoch": 0.00815910249872514,
      "grad_norm": 0.08677750825881958,
      "learning_rate": 8e-05,
      "loss": 2.4109,
      "step": 4
    },
    {
      "epoch": 0.010198878123406425,
      "grad_norm": 0.10869669914245605,
      "learning_rate": 0.0001,
      "loss": 2.6336,
      "step": 5
    },
    {
      "epoch": 0.01223865374808771,
      "grad_norm": 0.1180991679430008,
      "learning_rate": 0.00012,
      "loss": 2.6001,
      "step": 6
    },
    {
      "epoch": 0.014278429372768995,
      "grad_norm": 0.1641693264245987,
      "learning_rate": 0.00014,
      "loss": 2.9137,
      "step": 7
    },
    {
      "epoch": 0.01631820499745028,
      "grad_norm": 0.17794281244277954,
      "learning_rate": 0.00016,
      "loss": 2.835,
      "step": 8
    },
    {
      "epoch": 0.018357980622131564,
      "grad_norm": 0.16260501742362976,
      "learning_rate": 0.00018,
      "loss": 2.6015,
      "step": 9
    },
    {
      "epoch": 0.02039775624681285,
      "grad_norm": 0.13795699179172516,
      "learning_rate": 0.0002,
      "loss": 2.2552,
      "step": 10
    },
    {
      "epoch": 0.022437531871494134,
      "grad_norm": 0.15532873570919037,
      "learning_rate": 0.00019999947552365961,
      "loss": 2.4393,
      "step": 11
    },
    {
      "epoch": 0.02447730749617542,
      "grad_norm": 0.159002423286438,
      "learning_rate": 0.00019999790210013988,
      "loss": 2.7684,
      "step": 12
    },
    {
      "epoch": 0.026517083120856707,
      "grad_norm": 0.17115084826946259,
      "learning_rate": 0.0001999952797459453,
      "loss": 2.2409,
      "step": 13
    },
    {
      "epoch": 0.02855685874553799,
      "grad_norm": 0.26108402013778687,
      "learning_rate": 0.0001999916084885832,
      "loss": 2.6388,
      "step": 14
    },
    {
      "epoch": 0.030596634370219276,
      "grad_norm": 0.29758986830711365,
      "learning_rate": 0.00019998688836656323,
      "loss": 2.4358,
      "step": 15
    },
    {
      "epoch": 0.03263640999490056,
      "grad_norm": 0.2338196337223053,
      "learning_rate": 0.0001999811194293973,
      "loss": 2.3898,
      "step": 16
    },
    {
      "epoch": 0.03467618561958185,
      "grad_norm": 0.2143183797597885,
      "learning_rate": 0.00019997430173759875,
      "loss": 2.6874,
      "step": 17
    },
    {
      "epoch": 0.03671596124426313,
      "grad_norm": 0.19598309695720673,
      "learning_rate": 0.00019996643536268204,
      "loss": 2.409,
      "step": 18
    },
    {
      "epoch": 0.038755736868944415,
      "grad_norm": 0.16046980023384094,
      "learning_rate": 0.00019995752038716168,
      "loss": 2.2378,
      "step": 19
    },
    {
      "epoch": 0.0407955124936257,
      "grad_norm": 0.15274696052074432,
      "learning_rate": 0.00019994755690455152,
      "loss": 2.3413,
      "step": 20
    },
    {
      "epoch": 0.04283528811830699,
      "grad_norm": 0.24761317670345306,
      "learning_rate": 0.0001999365450193638,
      "loss": 2.3382,
      "step": 21
    },
    {
      "epoch": 0.04487506374298827,
      "grad_norm": 0.2602517604827881,
      "learning_rate": 0.00019992448484710797,
      "loss": 2.3355,
      "step": 22
    },
    {
      "epoch": 0.046914839367669554,
      "grad_norm": 0.25506100058555603,
      "learning_rate": 0.00019991137651428957,
      "loss": 2.4828,
      "step": 23
    },
    {
      "epoch": 0.04895461499235084,
      "grad_norm": 0.19493895769119263,
      "learning_rate": 0.0001998972201584088,
      "loss": 2.3154,
      "step": 24
    },
    {
      "epoch": 0.05099439061703213,
      "grad_norm": 0.16005843877792358,
      "learning_rate": 0.0001998820159279591,
      "loss": 2.1472,
      "step": 25
    },
    {
      "epoch": 0.053034166241713414,
      "grad_norm": 0.17840257287025452,
      "learning_rate": 0.00019986576398242566,
      "loss": 2.1624,
      "step": 26
    },
    {
      "epoch": 0.05507394186639469,
      "grad_norm": 0.15280942618846893,
      "learning_rate": 0.0001998484644922837,
      "loss": 2.0526,
      "step": 27
    },
    {
      "epoch": 0.05711371749107598,
      "grad_norm": 0.16933050751686096,
      "learning_rate": 0.00019983011763899673,
      "loss": 2.2176,
      "step": 28
    },
    {
      "epoch": 0.059153493115757266,
      "grad_norm": 0.21265609562397003,
      "learning_rate": 0.0001998107236150145,
      "loss": 2.2559,
      "step": 29
    },
    {
      "epoch": 0.06119326874043855,
      "grad_norm": 0.19340573251247406,
      "learning_rate": 0.00019979028262377118,
      "loss": 2.3202,
      "step": 30
    },
    {
      "epoch": 0.06323304436511984,
      "grad_norm": 0.16693681478500366,
      "learning_rate": 0.0001997687948796831,
      "loss": 2.0623,
      "step": 31
    },
    {
      "epoch": 0.06527281998980113,
      "grad_norm": 0.18830184638500214,
      "learning_rate": 0.00019974626060814647,
      "loss": 2.1929,
      "step": 32
    },
    {
      "epoch": 0.06731259561448241,
      "grad_norm": 0.16206099092960358,
      "learning_rate": 0.0001997226800455352,
      "loss": 2.3048,
      "step": 33
    },
    {
      "epoch": 0.0693523712391637,
      "grad_norm": 0.21650008857250214,
      "learning_rate": 0.00019969805343919821,
      "loss": 2.2928,
      "step": 34
    },
    {
      "epoch": 0.07139214686384497,
      "grad_norm": 0.19560708105564117,
      "learning_rate": 0.00019967238104745696,
      "loss": 2.4389,
      "step": 35
    },
    {
      "epoch": 0.07343192248852626,
      "grad_norm": 0.17428375780582428,
      "learning_rate": 0.00019964566313960264,
      "loss": 2.1017,
      "step": 36
    },
    {
      "epoch": 0.07547169811320754,
      "grad_norm": 0.14846819639205933,
      "learning_rate": 0.00019961789999589356,
      "loss": 2.0431,
      "step": 37
    },
    {
      "epoch": 0.07751147373788883,
      "grad_norm": 0.14796215295791626,
      "learning_rate": 0.00019958909190755187,
      "loss": 2.0363,
      "step": 38
    },
    {
      "epoch": 0.07955124936257012,
      "grad_norm": 0.17396488785743713,
      "learning_rate": 0.0001995592391767608,
      "loss": 2.0835,
      "step": 39
    },
    {
      "epoch": 0.0815910249872514,
      "grad_norm": 0.1914213001728058,
      "learning_rate": 0.0001995283421166614,
      "loss": 2.0242,
      "step": 40
    },
    {
      "epoch": 0.08363080061193269,
      "grad_norm": 0.16040053963661194,
      "learning_rate": 0.00019949640105134918,
      "loss": 2.2171,
      "step": 41
    },
    {
      "epoch": 0.08567057623661398,
      "grad_norm": 0.16291815042495728,
      "learning_rate": 0.00019946341631587087,
      "loss": 2.2144,
      "step": 42
    },
    {
      "epoch": 0.08771035186129526,
      "grad_norm": 0.16890890896320343,
      "learning_rate": 0.00019942938825622065,
      "loss": 2.0775,
      "step": 43
    },
    {
      "epoch": 0.08975012748597654,
      "grad_norm": 0.18898645043373108,
      "learning_rate": 0.0001993943172293368,
      "loss": 2.262,
      "step": 44
    },
    {
      "epoch": 0.09178990311065782,
      "grad_norm": 0.19105440378189087,
      "learning_rate": 0.00019935820360309777,
      "loss": 2.0232,
      "step": 45
    },
    {
      "epoch": 0.09382967873533911,
      "grad_norm": 0.17944768071174622,
      "learning_rate": 0.00019932104775631846,
      "loss": 2.2564,
      "step": 46
    },
    {
      "epoch": 0.0958694543600204,
      "grad_norm": 0.1691497266292572,
      "learning_rate": 0.0001992828500787461,
      "loss": 2.0249,
      "step": 47
    },
    {
      "epoch": 0.09790922998470168,
      "grad_norm": 0.18704521656036377,
      "learning_rate": 0.00019924361097105623,
      "loss": 2.2058,
      "step": 48
    },
    {
      "epoch": 0.09994900560938297,
      "grad_norm": 0.24133948981761932,
      "learning_rate": 0.00019920333084484857,
      "loss": 2.2835,
      "step": 49
    },
    {
      "epoch": 0.10198878123406425,
      "grad_norm": 0.210649773478508,
      "learning_rate": 0.00019916201012264254,
      "loss": 2.1052,
      "step": 50
    },
    {
      "epoch": 0.10402855685874554,
      "grad_norm": 0.19624340534210205,
      "learning_rate": 0.00019911964923787295,
      "loss": 2.2474,
      "step": 51
    },
    {
      "epoch": 0.10606833248342683,
      "grad_norm": 0.2529032230377197,
      "learning_rate": 0.0001990762486348855,
      "loss": 1.9782,
      "step": 52
    },
    {
      "epoch": 0.10810810810810811,
      "grad_norm": 0.2078029066324234,
      "learning_rate": 0.00019903180876893194,
      "loss": 2.3627,
      "step": 53
    },
    {
      "epoch": 0.11014788373278939,
      "grad_norm": 0.23153568804264069,
      "learning_rate": 0.00019898633010616542,
      "loss": 1.9749,
      "step": 54
    },
    {
      "epoch": 0.11218765935747067,
      "grad_norm": 0.20798815786838531,
      "learning_rate": 0.00019893981312363562,
      "loss": 2.111,
      "step": 55
    },
    {
      "epoch": 0.11422743498215196,
      "grad_norm": 0.20742167532444,
      "learning_rate": 0.00019889225830928365,
      "loss": 2.113,
      "step": 56
    },
    {
      "epoch": 0.11626721060683325,
      "grad_norm": 0.21235893666744232,
      "learning_rate": 0.00019884366616193706,
      "loss": 2.0307,
      "step": 57
    },
    {
      "epoch": 0.11830698623151453,
      "grad_norm": 0.19754983484745026,
      "learning_rate": 0.0001987940371913044,
      "loss": 1.9883,
      "step": 58
    },
    {
      "epoch": 0.12034676185619582,
      "grad_norm": 0.20224173367023468,
      "learning_rate": 0.0001987433719179702,
      "loss": 2.0299,
      "step": 59
    },
    {
      "epoch": 0.1223865374808771,
      "grad_norm": 0.20431111752986908,
      "learning_rate": 0.00019869167087338907,
      "loss": 2.0,
      "step": 60
    },
    {
      "epoch": 0.12442631310555839,
      "grad_norm": 0.21355204284191132,
      "learning_rate": 0.00019863893459988062,
      "loss": 1.9365,
      "step": 61
    },
    {
      "epoch": 0.12646608873023968,
      "grad_norm": 0.2394651174545288,
      "learning_rate": 0.00019858516365062334,
      "loss": 1.9264,
      "step": 62
    },
    {
      "epoch": 0.12850586435492095,
      "grad_norm": 0.20690159499645233,
      "learning_rate": 0.00019853035858964906,
      "loss": 1.9777,
      "step": 63
    },
    {
      "epoch": 0.13054563997960225,
      "grad_norm": 0.21029432117938995,
      "learning_rate": 0.00019847451999183694,
      "loss": 2.2063,
      "step": 64
    },
    {
      "epoch": 0.13258541560428352,
      "grad_norm": 0.2483580857515335,
      "learning_rate": 0.00019841764844290744,
      "loss": 1.9817,
      "step": 65
    },
    {
      "epoch": 0.13462519122896482,
      "grad_norm": 0.2200578898191452,
      "learning_rate": 0.0001983597445394162,
      "loss": 1.8286,
      "step": 66
    },
    {
      "epoch": 0.1366649668536461,
      "grad_norm": 0.23194506764411926,
      "learning_rate": 0.00019830080888874778,
      "loss": 1.8918,
      "step": 67
    },
    {
      "epoch": 0.1387047424783274,
      "grad_norm": 0.2092316597700119,
      "learning_rate": 0.00019824084210910925,
      "loss": 2.0326,
      "step": 68
    },
    {
      "epoch": 0.14074451810300867,
      "grad_norm": 0.226849764585495,
      "learning_rate": 0.00019817984482952376,
      "loss": 2.1387,
      "step": 69
    },
    {
      "epoch": 0.14278429372768994,
      "grad_norm": 0.25519493222236633,
      "learning_rate": 0.0001981178176898239,
      "loss": 1.9261,
      "step": 70
    },
    {
      "epoch": 0.14482406935237124,
      "grad_norm": 0.2155541330575943,
      "learning_rate": 0.00019805476134064507,
      "loss": 2.2176,
      "step": 71
    },
    {
      "epoch": 0.14686384497705252,
      "grad_norm": 0.23612210154533386,
      "learning_rate": 0.00019799067644341844,
      "loss": 2.1125,
      "step": 72
    },
    {
      "epoch": 0.14890362060173382,
      "grad_norm": 0.2563931941986084,
      "learning_rate": 0.00019792556367036432,
      "loss": 1.9459,
      "step": 73
    },
    {
      "epoch": 0.1509433962264151,
      "grad_norm": 0.34834709763526917,
      "learning_rate": 0.0001978594237044849,
      "loss": 2.091,
      "step": 74
    },
    {
      "epoch": 0.1529831718510964,
      "grad_norm": 0.221855029463768,
      "learning_rate": 0.00019779225723955707,
      "loss": 1.8867,
      "step": 75
    },
    {
      "epoch": 0.15502294747577766,
      "grad_norm": 0.2613975405693054,
      "learning_rate": 0.0001977240649801253,
      "loss": 1.9889,
      "step": 76
    },
    {
      "epoch": 0.15706272310045896,
      "grad_norm": 0.3117937743663788,
      "learning_rate": 0.00019765484764149415,
      "loss": 2.0743,
      "step": 77
    },
    {
      "epoch": 0.15910249872514023,
      "grad_norm": 0.23428964614868164,
      "learning_rate": 0.00019758460594972068,
      "loss": 2.1752,
      "step": 78
    },
    {
      "epoch": 0.1611422743498215,
      "grad_norm": 0.23182815313339233,
      "learning_rate": 0.00019751334064160706,
      "loss": 1.9315,
      "step": 79
    },
    {
      "epoch": 0.1631820499745028,
      "grad_norm": 0.3055015206336975,
      "learning_rate": 0.00019744105246469263,
      "loss": 2.0348,
      "step": 80
    },
    {
      "epoch": 0.16522182559918408,
      "grad_norm": 0.22985045611858368,
      "learning_rate": 0.00019736774217724614,
      "loss": 1.792,
      "step": 81
    },
    {
      "epoch": 0.16726160122386538,
      "grad_norm": 0.2585189938545227,
      "learning_rate": 0.00019729341054825782,
      "loss": 2.0945,
      "step": 82
    },
    {
      "epoch": 0.16930137684854665,
      "grad_norm": 0.2798707187175751,
      "learning_rate": 0.00019721805835743134,
      "loss": 2.0114,
      "step": 83
    },
    {
      "epoch": 0.17134115247322795,
      "grad_norm": 0.2886582016944885,
      "learning_rate": 0.00019714168639517544,
      "loss": 2.0575,
      "step": 84
    },
    {
      "epoch": 0.17338092809790923,
      "grad_norm": 0.2944013178348541,
      "learning_rate": 0.00019706429546259593,
      "loss": 2.1074,
      "step": 85
    },
    {
      "epoch": 0.17542070372259053,
      "grad_norm": 0.318002313375473,
      "learning_rate": 0.00019698588637148703,
      "loss": 1.9883,
      "step": 86
    },
    {
      "epoch": 0.1774604793472718,
      "grad_norm": 0.256496787071228,
      "learning_rate": 0.00019690645994432305,
      "loss": 1.8408,
      "step": 87
    },
    {
      "epoch": 0.17950025497195307,
      "grad_norm": 0.3593447208404541,
      "learning_rate": 0.0001968260170142496,
      "loss": 1.9313,
      "step": 88
    },
    {
      "epoch": 0.18154003059663437,
      "grad_norm": 0.4645783305168152,
      "learning_rate": 0.00019674455842507492,
      "loss": 1.9448,
      "step": 89
    },
    {
      "epoch": 0.18357980622131564,
      "grad_norm": 0.2802218198776245,
      "learning_rate": 0.00019666208503126112,
      "loss": 1.9091,
      "step": 90
    },
    {
      "epoch": 0.18561958184599694,
      "grad_norm": 0.39699026942253113,
      "learning_rate": 0.00019657859769791505,
      "loss": 1.9936,
      "step": 91
    },
    {
      "epoch": 0.18765935747067822,
      "grad_norm": 0.4515025317668915,
      "learning_rate": 0.00019649409730077935,
      "loss": 2.2269,
      "step": 92
    },
    {
      "epoch": 0.18969913309535952,
      "grad_norm": 0.24775496125221252,
      "learning_rate": 0.00019640858472622316,
      "loss": 1.8843,
      "step": 93
    },
    {
      "epoch": 0.1917389087200408,
      "grad_norm": 0.35014575719833374,
      "learning_rate": 0.00019632206087123296,
      "loss": 2.0159,
      "step": 94
    },
    {
      "epoch": 0.1937786843447221,
      "grad_norm": 0.4293341040611267,
      "learning_rate": 0.00019623452664340306,
      "loss": 2.1129,
      "step": 95
    },
    {
      "epoch": 0.19581845996940336,
      "grad_norm": 0.249364972114563,
      "learning_rate": 0.000196145982960926,
      "loss": 2.1385,
      "step": 96
    },
    {
      "epoch": 0.19785823559408466,
      "grad_norm": 0.3796793222427368,
      "learning_rate": 0.00019605643075258321,
      "loss": 1.9737,
      "step": 97
    },
    {
      "epoch": 0.19989801121876594,
      "grad_norm": 0.41410332918167114,
      "learning_rate": 0.00019596587095773495,
      "loss": 1.895,
      "step": 98
    },
    {
      "epoch": 0.2019377868434472,
      "grad_norm": 0.2810049057006836,
      "learning_rate": 0.0001958743045263106,
      "loss": 1.9859,
      "step": 99
    },
    {
      "epoch": 0.2039775624681285,
      "grad_norm": 0.37417009472846985,
      "learning_rate": 0.00019578173241879872,
      "loss": 1.9447,
      "step": 100
    },
    {
      "epoch": 0.20601733809280978,
      "grad_norm": 0.33988890051841736,
      "learning_rate": 0.0001956881556062369,
      "loss": 1.8362,
      "step": 101
    },
    {
      "epoch": 0.20805711371749108,
      "grad_norm": 0.27952101826667786,
      "learning_rate": 0.00019559357507020162,
      "loss": 1.9745,
      "step": 102
    },
    {
      "epoch": 0.21009688934217235,
      "grad_norm": 0.24870562553405762,
      "learning_rate": 0.00019549799180279792,
      "loss": 1.8926,
      "step": 103
    },
    {
      "epoch": 0.21213666496685366,
      "grad_norm": 0.33430561423301697,
      "learning_rate": 0.00019540140680664913,
      "loss": 2.0914,
      "step": 104
    },
    {
      "epoch": 0.21417644059153493,
      "grad_norm": 0.3203825056552887,
      "learning_rate": 0.0001953038210948861,
      "loss": 2.0238,
      "step": 105
    },
    {
      "epoch": 0.21621621621621623,
      "grad_norm": 0.2822887897491455,
      "learning_rate": 0.00019520523569113677,
      "loss": 2.053,
      "step": 106
    },
    {
      "epoch": 0.2182559918408975,
      "grad_norm": 0.31055620312690735,
      "learning_rate": 0.00019510565162951537,
      "loss": 1.8936,
      "step": 107
    },
    {
      "epoch": 0.22029576746557877,
      "grad_norm": 0.3920172452926636,
      "learning_rate": 0.0001950050699546116,
      "loss": 2.1061,
      "step": 108
    },
    {
      "epoch": 0.22233554309026007,
      "grad_norm": 0.2737603187561035,
      "learning_rate": 0.00019490349172147963,
      "loss": 2.0772,
      "step": 109
    },
    {
      "epoch": 0.22437531871494135,
      "grad_norm": 0.27526190876960754,
      "learning_rate": 0.00019480091799562704,
      "loss": 1.9078,
      "step": 110
    },
    {
      "epoch": 0.22641509433962265,
      "grad_norm": 0.3213340938091278,
      "learning_rate": 0.00019469734985300371,
      "loss": 1.8632,
      "step": 111
    },
    {
      "epoch": 0.22845486996430392,
      "grad_norm": 0.3409796953201294,
      "learning_rate": 0.00019459278837999046,
      "loss": 1.8264,
      "step": 112
    },
    {
      "epoch": 0.23049464558898522,
      "grad_norm": 0.2637316882610321,
      "learning_rate": 0.00019448723467338763,
      "loss": 1.9274,
      "step": 113
    },
    {
      "epoch": 0.2325344212136665,
      "grad_norm": 0.2738693952560425,
      "learning_rate": 0.00019438068984040365,
      "loss": 2.0925,
      "step": 114
    },
    {
      "epoch": 0.2345741968383478,
      "grad_norm": 0.43626368045806885,
      "learning_rate": 0.00019427315499864344,
      "loss": 1.9424,
      "step": 115
    },
    {
      "epoch": 0.23661397246302907,
      "grad_norm": 0.3207686245441437,
      "learning_rate": 0.00019416463127609656,
      "loss": 1.8188,
      "step": 116
    },
    {
      "epoch": 0.23865374808771037,
      "grad_norm": 0.28913381695747375,
      "learning_rate": 0.0001940551198111255,
      "loss": 1.9561,
      "step": 117
    },
    {
      "epoch": 0.24069352371239164,
      "grad_norm": 0.3558366894721985,
      "learning_rate": 0.00019394462175245381,
      "loss": 1.8839,
      "step": 118
    },
    {
      "epoch": 0.2427332993370729,
      "grad_norm": 0.3230222463607788,
      "learning_rate": 0.0001938331382591537,
      "loss": 1.8197,
      "step": 119
    },
    {
      "epoch": 0.2447730749617542,
      "grad_norm": 0.31508007645606995,
      "learning_rate": 0.00019372067050063438,
      "loss": 2.0963,
      "step": 120
    },
    {
      "epoch": 0.24681285058643548,
      "grad_norm": 0.28743404150009155,
      "learning_rate": 0.00019360721965662933,
      "loss": 1.8382,
      "step": 121
    },
    {
      "epoch": 0.24885262621111678,
      "grad_norm": 0.32556018233299255,
      "learning_rate": 0.00019349278691718427,
      "loss": 1.8255,
      "step": 122
    },
    {
      "epoch": 0.2508924018357981,
      "grad_norm": 0.3947349488735199,
      "learning_rate": 0.00019337737348264447,
      "loss": 2.1048,
      "step": 123
    },
    {
      "epoch": 0.25293217746047936,
      "grad_norm": 0.280627965927124,
      "learning_rate": 0.00019326098056364222,
      "loss": 1.7557,
      "step": 124
    },
    {
      "epoch": 0.25497195308516063,
      "grad_norm": 0.34356269240379333,
      "learning_rate": 0.00019314360938108425,
      "loss": 1.9692,
      "step": 125
    },
    {
      "epoch": 0.2570117287098419,
      "grad_norm": 0.30822035670280457,
      "learning_rate": 0.00019302526116613864,
      "loss": 1.9135,
      "step": 126
    },
    {
      "epoch": 0.2590515043345232,
      "grad_norm": 0.28359460830688477,
      "learning_rate": 0.00019290593716022217,
      "loss": 2.0667,
      "step": 127
    },
    {
      "epoch": 0.2610912799592045,
      "grad_norm": 0.2782716751098633,
      "learning_rate": 0.00019278563861498723,
      "loss": 1.7958,
      "step": 128
    },
    {
      "epoch": 0.2631310555838858,
      "grad_norm": 0.2571290135383606,
      "learning_rate": 0.00019266436679230865,
      "loss": 2.1198,
      "step": 129
    },
    {
      "epoch": 0.26517083120856705,
      "grad_norm": 0.3336668908596039,
      "learning_rate": 0.00019254212296427044,
      "loss": 1.8137,
      "step": 130
    },
    {
      "epoch": 0.2672106068332483,
      "grad_norm": 0.23998981714248657,
      "learning_rate": 0.00019241890841315248,
      "loss": 2.0071,
      "step": 131
    },
    {
      "epoch": 0.26925038245792965,
      "grad_norm": 0.3191507160663605,
      "learning_rate": 0.0001922947244314172,
      "loss": 2.0239,
      "step": 132
    },
    {
      "epoch": 0.2712901580826109,
      "grad_norm": 0.30696266889572144,
      "learning_rate": 0.0001921695723216957,
      "loss": 2.2377,
      "step": 133
    },
    {
      "epoch": 0.2733299337072922,
      "grad_norm": 0.2688175439834595,
      "learning_rate": 0.00019204345339677442,
      "loss": 1.8686,
      "step": 134
    },
    {
      "epoch": 0.27536970933197347,
      "grad_norm": 0.27408865094184875,
      "learning_rate": 0.00019191636897958122,
      "loss": 2.0253,
      "step": 135
    },
    {
      "epoch": 0.2774094849566548,
      "grad_norm": 0.2917419672012329,
      "learning_rate": 0.00019178832040317155,
      "loss": 1.905,
      "step": 136
    },
    {
      "epoch": 0.27944926058133607,
      "grad_norm": 0.27972346544265747,
      "learning_rate": 0.0001916593090107143,
      "loss": 1.8001,
      "step": 137
    },
    {
      "epoch": 0.28148903620601734,
      "grad_norm": 0.2744503915309906,
      "learning_rate": 0.00019152933615547798,
      "loss": 1.9057,
      "step": 138
    },
    {
      "epoch": 0.2835288118306986,
      "grad_norm": 0.27640461921691895,
      "learning_rate": 0.0001913984032008163,
      "loss": 2.0683,
      "step": 139
    },
    {
      "epoch": 0.2855685874553799,
      "grad_norm": 0.28741398453712463,
      "learning_rate": 0.00019126651152015403,
      "loss": 1.9567,
      "step": 140
    },
    {
      "epoch": 0.2876083630800612,
      "grad_norm": 0.31334587931632996,
      "learning_rate": 0.0001911336624969725,
      "loss": 1.8974,
      "step": 141
    },
    {
      "epoch": 0.2896481387047425,
      "grad_norm": 0.2697933614253998,
      "learning_rate": 0.00019099985752479506,
      "loss": 1.8786,
      "step": 142
    },
    {
      "epoch": 0.29168791432942376,
      "grad_norm": 0.2917918562889099,
      "learning_rate": 0.00019086509800717258,
      "loss": 2.0443,
      "step": 143
    },
    {
      "epoch": 0.29372768995410503,
      "grad_norm": 0.2908966541290283,
      "learning_rate": 0.00019072938535766865,
      "loss": 1.9817,
      "step": 144
    },
    {
      "epoch": 0.29576746557878636,
      "grad_norm": 0.2819146513938904,
      "learning_rate": 0.0001905927209998447,
      "loss": 2.0689,
      "step": 145
    },
    {
      "epoch": 0.29780724120346763,
      "grad_norm": 0.3112149238586426,
      "learning_rate": 0.0001904551063672452,
      "loss": 1.9045,
      "step": 146
    },
    {
      "epoch": 0.2998470168281489,
      "grad_norm": 0.2511192560195923,
      "learning_rate": 0.00019031654290338254,
      "loss": 2.007,
      "step": 147
    },
    {
      "epoch": 0.3018867924528302,
      "grad_norm": 0.2944158911705017,
      "learning_rate": 0.00019017703206172185,
      "loss": 1.9059,
      "step": 148
    },
    {
      "epoch": 0.30392656807751145,
      "grad_norm": 0.2799970209598541,
      "learning_rate": 0.0001900365753056659,
      "loss": 1.8268,
      "step": 149
    },
    {
      "epoch": 0.3059663437021928,
      "grad_norm": 0.3119431436061859,
      "learning_rate": 0.00018989517410853955,
      "loss": 1.7999,
      "step": 150
    },
    {
      "epoch": 0.30800611932687405,
      "grad_norm": 0.27174267172813416,
      "learning_rate": 0.00018975282995357446,
      "loss": 1.8971,
      "step": 151
    },
    {
      "epoch": 0.3100458949515553,
      "grad_norm": 0.3147140443325043,
      "learning_rate": 0.00018960954433389345,
      "loss": 1.9755,
      "step": 152
    },
    {
      "epoch": 0.3120856705762366,
      "grad_norm": 0.27466344833374023,
      "learning_rate": 0.00018946531875249493,
      "loss": 1.9819,
      "step": 153
    },
    {
      "epoch": 0.3141254462009179,
      "grad_norm": 0.2912920117378235,
      "learning_rate": 0.00018932015472223693,
      "loss": 2.1079,
      "step": 154
    },
    {
      "epoch": 0.3161652218255992,
      "grad_norm": 0.3123255968093872,
      "learning_rate": 0.00018917405376582145,
      "loss": 1.93,
      "step": 155
    },
    {
      "epoch": 0.31820499745028047,
      "grad_norm": 0.3270852565765381,
      "learning_rate": 0.0001890270174157784,
      "loss": 1.8386,
      "step": 156
    },
    {
      "epoch": 0.32024477307496174,
      "grad_norm": 0.30445969104766846,
      "learning_rate": 0.00018887904721444953,
      "loss": 2.1993,
      "step": 157
    },
    {
      "epoch": 0.322284548699643,
      "grad_norm": 0.2974015474319458,
      "learning_rate": 0.00018873014471397224,
      "loss": 1.8965,
      "step": 158
    },
    {
      "epoch": 0.32432432432432434,
      "grad_norm": 0.3236542046070099,
      "learning_rate": 0.00018858031147626325,
      "loss": 2.041,
      "step": 159
    },
    {
      "epoch": 0.3263640999490056,
      "grad_norm": 0.2958833575248718,
      "learning_rate": 0.00018842954907300236,
      "loss": 1.8681,
      "step": 160
    },
    {
      "epoch": 0.3284038755736869,
      "grad_norm": 0.27163368463516235,
      "learning_rate": 0.00018827785908561584,
      "loss": 1.9664,
      "step": 161
    },
    {
      "epoch": 0.33044365119836816,
      "grad_norm": 0.2662605941295624,
      "learning_rate": 0.0001881252431052599,
      "loss": 1.9255,
      "step": 162
    },
    {
      "epoch": 0.3324834268230495,
      "grad_norm": 0.2995011508464813,
      "learning_rate": 0.00018797170273280388,
      "loss": 2.1172,
      "step": 163
    },
    {
      "epoch": 0.33452320244773076,
      "grad_norm": 0.2997836768627167,
      "learning_rate": 0.00018781723957881372,
      "loss": 1.9044,
      "step": 164
    },
    {
      "epoch": 0.33656297807241203,
      "grad_norm": 0.2867211401462555,
      "learning_rate": 0.0001876618552635348,
      "loss": 1.8,
      "step": 165
    },
    {
      "epoch": 0.3386027536970933,
      "grad_norm": 0.2972771227359772,
      "learning_rate": 0.000187505551416875,
      "loss": 1.7879,
      "step": 166
    },
    {
      "epoch": 0.3406425293217746,
      "grad_norm": 0.27393755316734314,
      "learning_rate": 0.00018734832967838775,
      "loss": 2.1209,
      "step": 167
    },
    {
      "epoch": 0.3426823049464559,
      "grad_norm": 0.31422799825668335,
      "learning_rate": 0.00018719019169725472,
      "loss": 1.9435,
      "step": 168
    },
    {
      "epoch": 0.3447220805711372,
      "grad_norm": 0.3372005224227905,
      "learning_rate": 0.00018703113913226847,
      "loss": 2.0911,
      "step": 169
    },
    {
      "epoch": 0.34676185619581845,
      "grad_norm": 0.29995280504226685,
      "learning_rate": 0.00018687117365181512,
      "loss": 1.9077,
      "step": 170
    },
    {
      "epoch": 0.3488016318204997,
      "grad_norm": 0.2770789861679077,
      "learning_rate": 0.0001867102969338569,
      "loss": 1.7649,
      "step": 171
    },
    {
      "epoch": 0.35084140744518105,
      "grad_norm": 0.2982628047466278,
      "learning_rate": 0.00018654851066591448,
      "loss": 2.0258,
      "step": 172
    },
    {
      "epoch": 0.3528811830698623,
      "grad_norm": 0.2924087643623352,
      "learning_rate": 0.0001863858165450492,
      "loss": 1.9533,
      "step": 173
    },
    {
      "epoch": 0.3549209586945436,
      "grad_norm": 0.29398518800735474,
      "learning_rate": 0.0001862222162778454,
      "loss": 1.8298,
      "step": 174
    },
    {
      "epoch": 0.35696073431922487,
      "grad_norm": 0.34897300601005554,
      "learning_rate": 0.00018605771158039253,
      "loss": 1.7459,
      "step": 175
    },
    {
      "epoch": 0.35900050994390614,
      "grad_norm": 0.33509624004364014,
      "learning_rate": 0.00018589230417826697,
      "loss": 1.8843,
      "step": 176
    },
    {
      "epoch": 0.36104028556858747,
      "grad_norm": 0.30635756254196167,
      "learning_rate": 0.00018572599580651415,
      "loss": 2.0315,
      "step": 177
    },
    {
      "epoch": 0.36308006119326874,
      "grad_norm": 0.3544027805328369,
      "learning_rate": 0.00018555878820963013,
      "loss": 1.9821,
      "step": 178
    },
    {
      "epoch": 0.36511983681795,
      "grad_norm": 0.283241331577301,
      "learning_rate": 0.00018539068314154354,
      "loss": 1.7482,
      "step": 179
    },
    {
      "epoch": 0.3671596124426313,
      "grad_norm": 0.2630005478858948,
      "learning_rate": 0.00018522168236559695,
      "loss": 1.887,
      "step": 180
    },
    {
      "epoch": 0.3691993880673126,
      "grad_norm": 0.2816585898399353,
      "learning_rate": 0.00018505178765452853,
      "loss": 1.8995,
      "step": 181
    },
    {
      "epoch": 0.3712391636919939,
      "grad_norm": 0.30843281745910645,
      "learning_rate": 0.00018488100079045344,
      "loss": 1.943,
      "step": 182
    },
    {
      "epoch": 0.37327893931667516,
      "grad_norm": 0.30149805545806885,
      "learning_rate": 0.00018470932356484508,
      "loss": 1.8764,
      "step": 183
    },
    {
      "epoch": 0.37531871494135643,
      "grad_norm": 0.27851638197898865,
      "learning_rate": 0.00018453675777851627,
      "loss": 1.8901,
      "step": 184
    },
    {
      "epoch": 0.37735849056603776,
      "grad_norm": 0.34025222063064575,
      "learning_rate": 0.00018436330524160047,
      "loss": 2.167,
      "step": 185
    },
    {
      "epoch": 0.37939826619071904,
      "grad_norm": 0.3183801472187042,
      "learning_rate": 0.0001841889677735327,
      "loss": 1.9419,
      "step": 186
    },
    {
      "epoch": 0.3814380418154003,
      "grad_norm": 0.3072781562805176,
      "learning_rate": 0.00018401374720303056,
      "loss": 1.9817,
      "step": 187
    },
    {
      "epoch": 0.3834778174400816,
      "grad_norm": 0.2894771993160248,
      "learning_rate": 0.00018383764536807485,
      "loss": 2.114,
      "step": 188
    },
    {
      "epoch": 0.38551759306476285,
      "grad_norm": 0.3198698163032532,
      "learning_rate": 0.0001836606641158905,
      "loss": 1.8609,
      "step": 189
    },
    {
      "epoch": 0.3875573686894442,
      "grad_norm": 0.3462139964103699,
      "learning_rate": 0.00018348280530292713,
      "loss": 1.7753,
      "step": 190
    },
    {
      "epoch": 0.38959714431412545,
      "grad_norm": 0.2793361246585846,
      "learning_rate": 0.00018330407079483952,
      "loss": 2.1784,
      "step": 191
    },
    {
      "epoch": 0.3916369199388067,
      "grad_norm": 0.29519417881965637,
      "learning_rate": 0.0001831244624664681,
      "loss": 1.9627,
      "step": 192
    },
    {
      "epoch": 0.393676695563488,
      "grad_norm": 0.3317961096763611,
      "learning_rate": 0.00018294398220181917,
      "loss": 1.7643,
      "step": 193
    },
    {
      "epoch": 0.3957164711881693,
      "grad_norm": 0.3138796091079712,
      "learning_rate": 0.0001827626318940454,
      "loss": 1.792,
      "step": 194
    },
    {
      "epoch": 0.3977562468128506,
      "grad_norm": 0.30172964930534363,
      "learning_rate": 0.00018258041344542566,
      "loss": 1.8828,
      "step": 195
    },
    {
      "epoch": 0.3997960224375319,
      "grad_norm": 0.3473678529262543,
      "learning_rate": 0.00018239732876734527,
      "loss": 1.9373,
      "step": 196
    },
    {
      "epoch": 0.40183579806221315,
      "grad_norm": 0.3263239562511444,
      "learning_rate": 0.00018221337978027583,
      "loss": 1.9933,
      "step": 197
    },
    {
      "epoch": 0.4038755736868944,
      "grad_norm": 0.32656848430633545,
      "learning_rate": 0.00018202856841375518,
      "loss": 1.8501,
      "step": 198
    },
    {
      "epoch": 0.40591534931157575,
      "grad_norm": 0.3206334412097931,
      "learning_rate": 0.00018184289660636715,
      "loss": 2.0026,
      "step": 199
    },
    {
      "epoch": 0.407955124936257,
      "grad_norm": 0.31711164116859436,
      "learning_rate": 0.0001816563663057211,
      "loss": 1.9573,
      "step": 200
    },
    {
      "epoch": 0.4099949005609383,
      "grad_norm": 0.2891688048839569,
      "learning_rate": 0.00018146897946843163,
      "loss": 1.939,
      "step": 201
    },
    {
      "epoch": 0.41203467618561956,
      "grad_norm": 0.3304015100002289,
      "learning_rate": 0.000181280738060098,
      "loss": 1.874,
      "step": 202
    },
    {
      "epoch": 0.4140744518103009,
      "grad_norm": 0.2902586758136749,
      "learning_rate": 0.0001810916440552835,
      "loss": 1.8516,
      "step": 203
    },
    {
      "epoch": 0.41611422743498216,
      "grad_norm": 0.3066134452819824,
      "learning_rate": 0.00018090169943749476,
      "loss": 1.909,
      "step": 204
    },
    {
      "epoch": 0.41815400305966344,
      "grad_norm": 0.2844925820827484,
      "learning_rate": 0.00018071090619916093,
      "loss": 1.8373,
      "step": 205
    },
    {
      "epoch": 0.4201937786843447,
      "grad_norm": 0.2905077338218689,
      "learning_rate": 0.00018051926634161282,
      "loss": 1.8031,
      "step": 206
    },
    {
      "epoch": 0.422233554309026,
      "grad_norm": 0.3110411763191223,
      "learning_rate": 0.00018032678187506187,
      "loss": 1.9915,
      "step": 207
    },
    {
      "epoch": 0.4242733299337073,
      "grad_norm": 0.27634483575820923,
      "learning_rate": 0.00018013345481857903,
      "loss": 2.0072,
      "step": 208
    },
    {
      "epoch": 0.4263131055583886,
      "grad_norm": 0.29069051146507263,
      "learning_rate": 0.0001799392872000736,
      "loss": 2.0632,
      "step": 209
    },
    {
      "epoch": 0.42835288118306986,
      "grad_norm": 0.28966760635375977,
      "learning_rate": 0.00017974428105627208,
      "loss": 1.8184,
      "step": 210
    },
    {
      "epoch": 0.43039265680775113,
      "grad_norm": 0.2768760621547699,
      "learning_rate": 0.00017954843843269664,
      "loss": 1.8344,
      "step": 211
    },
    {
      "epoch": 0.43243243243243246,
      "grad_norm": 0.30277568101882935,
      "learning_rate": 0.0001793517613836437,
      "loss": 1.931,
      "step": 212
    },
    {
      "epoch": 0.43447220805711373,
      "grad_norm": 0.30833378434181213,
      "learning_rate": 0.00017915425197216245,
      "loss": 1.8319,
      "step": 213
    },
    {
      "epoch": 0.436511983681795,
      "grad_norm": 0.2517772316932678,
      "learning_rate": 0.00017895591227003315,
      "loss": 2.0335,
      "step": 214
    },
    {
      "epoch": 0.4385517593064763,
      "grad_norm": 0.3051300346851349,
      "learning_rate": 0.00017875674435774547,
      "loss": 1.8523,
      "step": 215
    },
    {
      "epoch": 0.44059153493115755,
      "grad_norm": 0.331875741481781,
      "learning_rate": 0.00017855675032447648,
      "loss": 1.902,
      "step": 216
    },
    {
      "epoch": 0.4426313105558389,
      "grad_norm": 0.3102109134197235,
      "learning_rate": 0.00017835593226806903,
      "loss": 1.9391,
      "step": 217
    },
    {
      "epoch": 0.44467108618052015,
      "grad_norm": 0.28581124544143677,
      "learning_rate": 0.00017815429229500946,
      "loss": 1.9595,
      "step": 218
    },
    {
      "epoch": 0.4467108618052014,
      "grad_norm": 0.2874554693698883,
      "learning_rate": 0.00017795183252040567,
      "loss": 1.8683,
      "step": 219
    },
    {
      "epoch": 0.4487506374298827,
      "grad_norm": 0.3131530284881592,
      "learning_rate": 0.00017774855506796496,
      "loss": 2.003,
      "step": 220
    },
    {
      "epoch": 0.450790413054564,
      "grad_norm": 0.3006989359855652,
      "learning_rate": 0.0001775444620699715,
      "loss": 1.8058,
      "step": 221
    },
    {
      "epoch": 0.4528301886792453,
      "grad_norm": 0.29621464014053345,
      "learning_rate": 0.0001773395556672644,
      "loss": 1.9781,
      "step": 222
    },
    {
      "epoch": 0.45486996430392657,
      "grad_norm": 0.325095534324646,
      "learning_rate": 0.00017713383800921478,
      "loss": 1.8633,
      "step": 223
    },
    {
      "epoch": 0.45690973992860784,
      "grad_norm": 0.306911826133728,
      "learning_rate": 0.00017692731125370354,
      "loss": 1.9318,
      "step": 224
    },
    {
      "epoch": 0.4589495155532891,
      "grad_norm": 0.335014283657074,
      "learning_rate": 0.00017671997756709863,
      "loss": 1.7637,
      "step": 225
    },
    {
      "epoch": 0.46098929117797044,
      "grad_norm": 0.28670719265937805,
      "learning_rate": 0.00017651183912423228,
      "loss": 1.988,
      "step": 226
    },
    {
      "epoch": 0.4630290668026517,
      "grad_norm": 0.2969343364238739,
      "learning_rate": 0.00017630289810837834,
      "loss": 1.7984,
      "step": 227
    },
    {
      "epoch": 0.465068842427333,
      "grad_norm": 0.3015127182006836,
      "learning_rate": 0.0001760931567112291,
      "loss": 1.7022,
      "step": 228
    },
    {
      "epoch": 0.46710861805201426,
      "grad_norm": 0.32308632135391235,
      "learning_rate": 0.00017588261713287267,
      "loss": 1.8067,
      "step": 229
    },
    {
      "epoch": 0.4691483936766956,
      "grad_norm": 0.30818650126457214,
      "learning_rate": 0.00017567128158176953,
      "loss": 1.7096,
      "step": 230
    },
    {
      "epoch": 0.47118816930137686,
      "grad_norm": 0.28488847613334656,
      "learning_rate": 0.00017545915227472965,
      "loss": 1.8784,
      "step": 231
    },
    {
      "epoch": 0.47322794492605813,
      "grad_norm": 0.3801325857639313,
      "learning_rate": 0.00017524623143688902,
      "loss": 1.9555,
      "step": 232
    },
    {
      "epoch": 0.4752677205507394,
      "grad_norm": 0.31661075353622437,
      "learning_rate": 0.00017503252130168657,
      "loss": 1.8717,
      "step": 233
    },
    {
      "epoch": 0.47730749617542073,
      "grad_norm": 0.296003520488739,
      "learning_rate": 0.00017481802411084042,
      "loss": 1.7293,
      "step": 234
    },
    {
      "epoch": 0.479347271800102,
      "grad_norm": 0.3210139274597168,
      "learning_rate": 0.0001746027421143246,
      "loss": 1.9587,
      "step": 235
    },
    {
      "epoch": 0.4813870474247833,
      "grad_norm": 0.2968738079071045,
      "learning_rate": 0.00017438667757034546,
      "loss": 2.0743,
      "step": 236
    },
    {
      "epoch": 0.48342682304946455,
      "grad_norm": 0.3373945355415344,
      "learning_rate": 0.00017416983274531775,
      "loss": 1.8239,
      "step": 237
    },
    {
      "epoch": 0.4854665986741458,
      "grad_norm": 0.29180705547332764,
      "learning_rate": 0.0001739522099138411,
      "loss": 1.9563,
      "step": 238
    },
    {
      "epoch": 0.48750637429882715,
      "grad_norm": 0.342316597700119,
      "learning_rate": 0.00017373381135867604,
      "loss": 1.9012,
      "step": 239
    },
    {
      "epoch": 0.4895461499235084,
      "grad_norm": 0.29859068989753723,
      "learning_rate": 0.00017351463937072004,
      "loss": 1.723,
      "step": 240
    },
    {
      "epoch": 0.4915859255481897,
      "grad_norm": 0.32331928610801697,
      "learning_rate": 0.0001732946962489836,
      "loss": 1.9278,
      "step": 241
    },
    {
      "epoch": 0.49362570117287097,
      "grad_norm": 0.3604521155357361,
      "learning_rate": 0.00017307398430056593,
      "loss": 1.9691,
      "step": 242
    },
    {
      "epoch": 0.4956654767975523,
      "grad_norm": 0.30965346097946167,
      "learning_rate": 0.000172852505840631,
      "loss": 1.7225,
      "step": 243
    },
    {
      "epoch": 0.49770525242223357,
      "grad_norm": 0.2769593894481659,
      "learning_rate": 0.00017263026319238301,
      "loss": 1.7938,
      "step": 244
    },
    {
      "epoch": 0.49974502804691484,
      "grad_norm": 0.33169788122177124,
      "learning_rate": 0.00017240725868704218,
      "loss": 1.6576,
      "step": 245
    },
    {
      "epoch": 0.49974502804691484,
      "eval_loss": 1.874290943145752,
      "eval_runtime": 49.8203,
      "eval_samples_per_second": 16.58,
      "eval_steps_per_second": 2.088,
      "step": 245
    },
    {
      "epoch": 0.5017848036715962,
      "grad_norm": 0.3088040053844452,
      "learning_rate": 0.00017218349466382023,
      "loss": 1.7977,
      "step": 246
    },
    {
      "epoch": 0.5038245792962774,
      "grad_norm": 0.3247360289096832,
      "learning_rate": 0.0001719589734698959,
      "loss": 1.9595,
      "step": 247
    },
    {
      "epoch": 0.5058643549209587,
      "grad_norm": 0.30020302534103394,
      "learning_rate": 0.00017173369746039025,
      "loss": 1.7303,
      "step": 248
    },
    {
      "epoch": 0.50790413054564,
      "grad_norm": 0.2880503833293915,
      "learning_rate": 0.00017150766899834204,
      "loss": 1.9203,
      "step": 249
    },
    {
      "epoch": 0.5099439061703213,
      "grad_norm": 0.2775571048259735,
      "learning_rate": 0.00017128089045468294,
      "loss": 1.8347,
      "step": 250
    },
    {
      "epoch": 0.5119836817950025,
      "grad_norm": 0.31517794728279114,
      "learning_rate": 0.00017105336420821247,
      "loss": 1.9258,
      "step": 251
    },
    {
      "epoch": 0.5140234574196838,
      "grad_norm": 0.315845251083374,
      "learning_rate": 0.0001708250926455733,
      "loss": 1.6125,
      "step": 252
    },
    {
      "epoch": 0.5160632330443651,
      "grad_norm": 0.3020716607570648,
      "learning_rate": 0.00017059607816122618,
      "loss": 2.036,
      "step": 253
    },
    {
      "epoch": 0.5181030086690463,
      "grad_norm": 0.3116091787815094,
      "learning_rate": 0.00017036632315742462,
      "loss": 1.7882,
      "step": 254
    },
    {
      "epoch": 0.5201427842937277,
      "grad_norm": 0.2845454216003418,
      "learning_rate": 0.00017013583004418993,
      "loss": 1.9165,
      "step": 255
    },
    {
      "epoch": 0.522182559918409,
      "grad_norm": 0.3218507468700409,
      "learning_rate": 0.00016990460123928575,
      "loss": 1.9143,
      "step": 256
    },
    {
      "epoch": 0.5242223355430903,
      "grad_norm": 0.32271113991737366,
      "learning_rate": 0.00016967263916819287,
      "loss": 1.7233,
      "step": 257
    },
    {
      "epoch": 0.5262621111677716,
      "grad_norm": 0.32956361770629883,
      "learning_rate": 0.00016943994626408363,
      "loss": 2.1151,
      "step": 258
    },
    {
      "epoch": 0.5283018867924528,
      "grad_norm": 0.309712678194046,
      "learning_rate": 0.0001692065249677965,
      "loss": 1.7285,
      "step": 259
    },
    {
      "epoch": 0.5303416624171341,
      "grad_norm": 0.3099533021450043,
      "learning_rate": 0.00016897237772781044,
      "loss": 1.8461,
      "step": 260
    },
    {
      "epoch": 0.5323814380418154,
      "grad_norm": 0.2932775914669037,
      "learning_rate": 0.00016873750700021915,
      "loss": 1.869,
      "step": 261
    },
    {
      "epoch": 0.5344212136664966,
      "grad_norm": 0.2912542223930359,
      "learning_rate": 0.00016850191524870546,
      "loss": 1.7672,
      "step": 262
    },
    {
      "epoch": 0.536460989291178,
      "grad_norm": 0.33142998814582825,
      "learning_rate": 0.00016826560494451537,
      "loss": 1.8723,
      "step": 263
    },
    {
      "epoch": 0.5385007649158593,
      "grad_norm": 0.26213347911834717,
      "learning_rate": 0.00016802857856643215,
      "loss": 1.6971,
      "step": 264
    },
    {
      "epoch": 0.5405405405405406,
      "grad_norm": 0.2854675352573395,
      "learning_rate": 0.00016779083860075033,
      "loss": 1.8517,
      "step": 265
    },
    {
      "epoch": 0.5425803161652218,
      "grad_norm": 0.3256230354309082,
      "learning_rate": 0.00016755238754124965,
      "loss": 1.9159,
      "step": 266
    },
    {
      "epoch": 0.5446200917899031,
      "grad_norm": 0.30096253752708435,
      "learning_rate": 0.00016731322788916892,
      "loss": 1.7229,
      "step": 267
    },
    {
      "epoch": 0.5466598674145844,
      "grad_norm": 0.2952723801136017,
      "learning_rate": 0.00016707336215317968,
      "loss": 1.8191,
      "step": 268
    },
    {
      "epoch": 0.5486996430392657,
      "grad_norm": 0.32182615995407104,
      "learning_rate": 0.00016683279284936004,
      "loss": 1.9188,
      "step": 269
    },
    {
      "epoch": 0.5507394186639469,
      "grad_norm": 0.3065682351589203,
      "learning_rate": 0.00016659152250116812,
      "loss": 1.5902,
      "step": 270
    },
    {
      "epoch": 0.5527791942886282,
      "grad_norm": 0.27141043543815613,
      "learning_rate": 0.00016634955363941574,
      "loss": 1.705,
      "step": 271
    },
    {
      "epoch": 0.5548189699133096,
      "grad_norm": 0.3703926205635071,
      "learning_rate": 0.00016610688880224178,
      "loss": 1.9515,
      "step": 272
    },
    {
      "epoch": 0.5568587455379909,
      "grad_norm": 0.3434322476387024,
      "learning_rate": 0.0001658635305350855,
      "loss": 1.6887,
      "step": 273
    },
    {
      "epoch": 0.5588985211626721,
      "grad_norm": 0.2847291827201843,
      "learning_rate": 0.00016561948139065996,
      "loss": 2.0415,
      "step": 274
    },
    {
      "epoch": 0.5609382967873534,
      "grad_norm": 0.27591603994369507,
      "learning_rate": 0.00016537474392892528,
      "loss": 1.8893,
      "step": 275
    },
    {
      "epoch": 0.5629780724120347,
      "grad_norm": 0.32474270462989807,
      "learning_rate": 0.00016512932071706152,
      "loss": 1.8589,
      "step": 276
    },
    {
      "epoch": 0.565017848036716,
      "grad_norm": 0.33138149976730347,
      "learning_rate": 0.0001648832143294422,
      "loss": 1.7978,
      "step": 277
    },
    {
      "epoch": 0.5670576236613972,
      "grad_norm": 0.321053683757782,
      "learning_rate": 0.0001646364273476067,
      "loss": 1.911,
      "step": 278
    },
    {
      "epoch": 0.5690973992860785,
      "grad_norm": 0.313310444355011,
      "learning_rate": 0.00016438896236023375,
      "loss": 1.7816,
      "step": 279
    },
    {
      "epoch": 0.5711371749107598,
      "grad_norm": 0.2893736958503723,
      "learning_rate": 0.000164140821963114,
      "loss": 1.7197,
      "step": 280
    },
    {
      "epoch": 0.5731769505354412,
      "grad_norm": 0.3043624758720398,
      "learning_rate": 0.00016389200875912278,
      "loss": 1.7415,
      "step": 281
    },
    {
      "epoch": 0.5752167261601224,
      "grad_norm": 0.29861506819725037,
      "learning_rate": 0.00016364252535819282,
      "loss": 1.7317,
      "step": 282
    },
    {
      "epoch": 0.5772565017848037,
      "grad_norm": 0.3631903827190399,
      "learning_rate": 0.000163392374377287,
      "loss": 1.8116,
      "step": 283
    },
    {
      "epoch": 0.579296277409485,
      "grad_norm": 0.32387158274650574,
      "learning_rate": 0.00016314155844037074,
      "loss": 1.8652,
      "step": 284
    },
    {
      "epoch": 0.5813360530341662,
      "grad_norm": 0.3442007899284363,
      "learning_rate": 0.00016289008017838445,
      "loss": 2.0466,
      "step": 285
    },
    {
      "epoch": 0.5833758286588475,
      "grad_norm": 0.3226166367530823,
      "learning_rate": 0.0001626379422292162,
      "loss": 1.8803,
      "step": 286
    },
    {
      "epoch": 0.5854156042835288,
      "grad_norm": 0.2765788733959198,
      "learning_rate": 0.00016238514723767374,
      "loss": 1.8048,
      "step": 287
    },
    {
      "epoch": 0.5874553799082101,
      "grad_norm": 0.26808875799179077,
      "learning_rate": 0.0001621316978554569,
      "loss": 1.9358,
      "step": 288
    },
    {
      "epoch": 0.5894951555328913,
      "grad_norm": 0.34248560667037964,
      "learning_rate": 0.00016187759674112973,
      "loss": 1.7614,
      "step": 289
    },
    {
      "epoch": 0.5915349311575727,
      "grad_norm": 0.3485107123851776,
      "learning_rate": 0.00016162284656009274,
      "loss": 1.7487,
      "step": 290
    },
    {
      "epoch": 0.593574706782254,
      "grad_norm": 0.31153398752212524,
      "learning_rate": 0.00016136744998455476,
      "loss": 2.0895,
      "step": 291
    },
    {
      "epoch": 0.5956144824069353,
      "grad_norm": 0.3104468584060669,
      "learning_rate": 0.00016111140969350503,
      "loss": 1.6566,
      "step": 292
    },
    {
      "epoch": 0.5976542580316165,
      "grad_norm": 0.32697245478630066,
      "learning_rate": 0.00016085472837268502,
      "loss": 1.7631,
      "step": 293
    },
    {
      "epoch": 0.5996940336562978,
      "grad_norm": 0.3330870270729065,
      "learning_rate": 0.00016059740871456036,
      "loss": 1.6047,
      "step": 294
    },
    {
      "epoch": 0.6017338092809791,
      "grad_norm": 0.3144790828227997,
      "learning_rate": 0.00016033945341829248,
      "loss": 1.5975,
      "step": 295
    },
    {
      "epoch": 0.6037735849056604,
      "grad_norm": 0.32482099533081055,
      "learning_rate": 0.00016008086518971037,
      "loss": 1.9879,
      "step": 296
    },
    {
      "epoch": 0.6058133605303416,
      "grad_norm": 0.320336252450943,
      "learning_rate": 0.0001598216467412822,
      "loss": 1.9723,
      "step": 297
    },
    {
      "epoch": 0.6078531361550229,
      "grad_norm": 0.2934703528881073,
      "learning_rate": 0.00015956180079208682,
      "loss": 1.7771,
      "step": 298
    },
    {
      "epoch": 0.6098929117797043,
      "grad_norm": 0.3081587851047516,
      "learning_rate": 0.0001593013300677853,
      "loss": 1.869,
      "step": 299
    },
    {
      "epoch": 0.6119326874043856,
      "grad_norm": 0.30365437269210815,
      "learning_rate": 0.00015904023730059228,
      "loss": 1.795,
      "step": 300
    },
    {
      "epoch": 0.6139724630290668,
      "grad_norm": 0.3306022882461548,
      "learning_rate": 0.00015877852522924732,
      "loss": 1.8786,
      "step": 301
    },
    {
      "epoch": 0.6160122386537481,
      "grad_norm": 0.3081189692020416,
      "learning_rate": 0.00015851619659898623,
      "loss": 1.8389,
      "step": 302
    },
    {
      "epoch": 0.6180520142784294,
      "grad_norm": 0.3126586973667145,
      "learning_rate": 0.00015825325416151222,
      "loss": 1.8404,
      "step": 303
    },
    {
      "epoch": 0.6200917899031106,
      "grad_norm": 0.2832873463630676,
      "learning_rate": 0.000157989700674967,
      "loss": 1.9894,
      "step": 304
    },
    {
      "epoch": 0.6221315655277919,
      "grad_norm": 0.3080177903175354,
      "learning_rate": 0.00015772553890390197,
      "loss": 1.9813,
      "step": 305
    },
    {
      "epoch": 0.6241713411524732,
      "grad_norm": 0.30326464772224426,
      "learning_rate": 0.00015746077161924905,
      "loss": 1.7597,
      "step": 306
    },
    {
      "epoch": 0.6262111167771545,
      "grad_norm": 0.3071492910385132,
      "learning_rate": 0.00015719540159829184,
      "loss": 1.8149,
      "step": 307
    },
    {
      "epoch": 0.6282508924018358,
      "grad_norm": 0.3333245515823364,
      "learning_rate": 0.00015692943162463628,
      "loss": 1.9806,
      "step": 308
    },
    {
      "epoch": 0.6302906680265171,
      "grad_norm": 0.2858702838420868,
      "learning_rate": 0.0001566628644881815,
      "loss": 1.7616,
      "step": 309
    },
    {
      "epoch": 0.6323304436511984,
      "grad_norm": 0.27514535188674927,
      "learning_rate": 0.00015639570298509064,
      "loss": 1.7516,
      "step": 310
    },
    {
      "epoch": 0.6343702192758797,
      "grad_norm": 0.29323917627334595,
      "learning_rate": 0.00015612794991776147,
      "loss": 1.8444,
      "step": 311
    },
    {
      "epoch": 0.6364099949005609,
      "grad_norm": 0.3227652907371521,
      "learning_rate": 0.00015585960809479696,
      "loss": 1.8715,
      "step": 312
    },
    {
      "epoch": 0.6384497705252422,
      "grad_norm": 0.2898751497268677,
      "learning_rate": 0.00015559068033097582,
      "loss": 1.9242,
      "step": 313
    },
    {
      "epoch": 0.6404895461499235,
      "grad_norm": 0.2990049421787262,
      "learning_rate": 0.00015532116944722308,
      "loss": 1.7412,
      "step": 314
    },
    {
      "epoch": 0.6425293217746048,
      "grad_norm": 0.28128162026405334,
      "learning_rate": 0.00015505107827058036,
      "loss": 1.7801,
      "step": 315
    },
    {
      "epoch": 0.644569097399286,
      "grad_norm": 0.32478880882263184,
      "learning_rate": 0.0001547804096341763,
      "loss": 1.681,
      "step": 316
    },
    {
      "epoch": 0.6466088730239674,
      "grad_norm": 0.31724509596824646,
      "learning_rate": 0.00015450916637719684,
      "loss": 1.9405,
      "step": 317
    },
    {
      "epoch": 0.6486486486486487,
      "grad_norm": 0.36439332365989685,
      "learning_rate": 0.00015423735134485536,
      "loss": 1.7013,
      "step": 318
    },
    {
      "epoch": 0.65068842427333,
      "grad_norm": 0.3085692226886749,
      "learning_rate": 0.00015396496738836292,
      "loss": 1.7641,
      "step": 319
    },
    {
      "epoch": 0.6527281998980112,
      "grad_norm": 0.2774830758571625,
      "learning_rate": 0.0001536920173648984,
      "loss": 1.6727,
      "step": 320
    },
    {
      "epoch": 0.6547679755226925,
      "grad_norm": 0.3212391138076782,
      "learning_rate": 0.0001534185041375783,
      "loss": 1.7221,
      "step": 321
    },
    {
      "epoch": 0.6568077511473738,
      "grad_norm": 0.30237263441085815,
      "learning_rate": 0.00015314443057542703,
      "loss": 1.7456,
      "step": 322
    },
    {
      "epoch": 0.658847526772055,
      "grad_norm": 0.2800453305244446,
      "learning_rate": 0.00015286979955334652,
      "loss": 1.7978,
      "step": 323
    },
    {
      "epoch": 0.6608873023967363,
      "grad_norm": 0.28402194380760193,
      "learning_rate": 0.00015259461395208628,
      "loss": 1.8785,
      "step": 324
    },
    {
      "epoch": 0.6629270780214176,
      "grad_norm": 0.3377262353897095,
      "learning_rate": 0.000152318876658213,
      "loss": 1.7113,
      "step": 325
    },
    {
      "epoch": 0.664966853646099,
      "grad_norm": 0.376544326543808,
      "learning_rate": 0.00015204259056408046,
      "loss": 1.6237,
      "step": 326
    },
    {
      "epoch": 0.6670066292707802,
      "grad_norm": 0.3138863742351532,
      "learning_rate": 0.00015176575856779904,
      "loss": 1.8357,
      "step": 327
    },
    {
      "epoch": 0.6690464048954615,
      "grad_norm": 0.40277865529060364,
      "learning_rate": 0.00015148838357320537,
      "loss": 1.9038,
      "step": 328
    },
    {
      "epoch": 0.6710861805201428,
      "grad_norm": 0.2998698055744171,
      "learning_rate": 0.0001512104684898319,
      "loss": 1.7583,
      "step": 329
    },
    {
      "epoch": 0.6731259561448241,
      "grad_norm": 0.33020147681236267,
      "learning_rate": 0.00015093201623287631,
      "loss": 1.7446,
      "step": 330
    },
    {
      "epoch": 0.6751657317695053,
      "grad_norm": 0.3545725643634796,
      "learning_rate": 0.00015065302972317108,
      "loss": 1.8882,
      "step": 331
    },
    {
      "epoch": 0.6772055073941866,
      "grad_norm": 0.2989177107810974,
      "learning_rate": 0.00015037351188715265,
      "loss": 1.7751,
      "step": 332
    },
    {
      "epoch": 0.6792452830188679,
      "grad_norm": 0.3225865662097931,
      "learning_rate": 0.00015009346565683087,
      "loss": 1.8539,
      "step": 333
    },
    {
      "epoch": 0.6812850586435492,
      "grad_norm": 0.31957486271858215,
      "learning_rate": 0.00014981289396975817,
      "loss": 1.6078,
      "step": 334
    },
    {
      "epoch": 0.6833248342682305,
      "grad_norm": 0.323512464761734,
      "learning_rate": 0.00014953179976899878,
      "loss": 1.9539,
      "step": 335
    },
    {
      "epoch": 0.6853646098929118,
      "grad_norm": 0.3451762795448303,
      "learning_rate": 0.00014925018600309785,
      "loss": 1.8435,
      "step": 336
    },
    {
      "epoch": 0.6874043855175931,
      "grad_norm": 0.3474057614803314,
      "learning_rate": 0.0001489680556260505,
      "loss": 1.8385,
      "step": 337
    },
    {
      "epoch": 0.6894441611422744,
      "grad_norm": 0.3119589686393738,
      "learning_rate": 0.00014868541159727096,
      "loss": 1.7582,
      "step": 338
    },
    {
      "epoch": 0.6914839367669556,
      "grad_norm": 0.3289264738559723,
      "learning_rate": 0.0001484022568815613,
      "loss": 1.8391,
      "step": 339
    },
    {
      "epoch": 0.6935237123916369,
      "grad_norm": 0.32644519209861755,
      "learning_rate": 0.00014811859444908052,
      "loss": 1.7676,
      "step": 340
    },
    {
      "epoch": 0.6955634880163182,
      "grad_norm": 0.2529926002025604,
      "learning_rate": 0.00014783442727531328,
      "loss": 1.6483,
      "step": 341
    },
    {
      "epoch": 0.6976032636409994,
      "grad_norm": 0.39409515261650085,
      "learning_rate": 0.00014754975834103877,
      "loss": 1.8951,
      "step": 342
    },
    {
      "epoch": 0.6996430392656807,
      "grad_norm": 0.312379390001297,
      "learning_rate": 0.00014726459063229945,
      "loss": 1.8388,
      "step": 343
    },
    {
      "epoch": 0.7016828148903621,
      "grad_norm": 0.3048081696033478,
      "learning_rate": 0.00014697892714036958,
      "loss": 1.9482,
      "step": 344
    },
    {
      "epoch": 0.7037225905150434,
      "grad_norm": 0.37214845418930054,
      "learning_rate": 0.00014669277086172406,
      "loss": 1.8466,
      "step": 345
    },
    {
      "epoch": 0.7057623661397247,
      "grad_norm": 0.3273617923259735,
      "learning_rate": 0.00014640612479800686,
      "loss": 1.5934,
      "step": 346
    },
    {
      "epoch": 0.7078021417644059,
      "grad_norm": 0.3283670246601105,
      "learning_rate": 0.00014611899195599953,
      "loss": 1.7837,
      "step": 347
    },
    {
      "epoch": 0.7098419173890872,
      "grad_norm": 0.38148233294487,
      "learning_rate": 0.00014583137534758967,
      "loss": 1.8952,
      "step": 348
    },
    {
      "epoch": 0.7118816930137685,
      "grad_norm": 0.35573315620422363,
      "learning_rate": 0.0001455432779897395,
      "loss": 1.6698,
      "step": 349
    },
    {
      "epoch": 0.7139214686384497,
      "grad_norm": 0.362224280834198,
      "learning_rate": 0.00014525470290445392,
      "loss": 1.7698,
      "step": 350
    },
{ |
|
"epoch": 0.715961244263131, |
|
"grad_norm": 0.3312818706035614, |
|
"learning_rate": 0.00014496565311874902, |
|
"loss": 1.7951, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.7180010198878123, |
|
"grad_norm": 0.3737257719039917, |
|
"learning_rate": 0.00014467613166462023, |
|
"loss": 1.9562, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.7200407955124937, |
|
"grad_norm": 0.37132444977760315, |
|
"learning_rate": 0.0001443861415790107, |
|
"loss": 1.9022, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.7220805711371749, |
|
"grad_norm": 0.30398017168045044, |
|
"learning_rate": 0.00014409568590377918, |
|
"loss": 2.0578, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.7241203467618562, |
|
"grad_norm": 0.3135487139225006, |
|
"learning_rate": 0.00014380476768566824, |
|
"loss": 1.777, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.7261601223865375, |
|
"grad_norm": 0.3089943826198578, |
|
"learning_rate": 0.00014351338997627234, |
|
"loss": 1.638, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.7281998980112188, |
|
"grad_norm": 0.2791310250759125, |
|
"learning_rate": 0.00014322155583200576, |
|
"loss": 1.7786, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.7302396736359, |
|
"grad_norm": 0.30986955761909485, |
|
"learning_rate": 0.00014292926831407061, |
|
"loss": 1.7467, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.7322794492605813, |
|
"grad_norm": 0.29783159494400024, |
|
"learning_rate": 0.0001426365304884246, |
|
"loss": 1.6486, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.7343192248852626, |
|
"grad_norm": 0.29579076170921326, |
|
"learning_rate": 0.00014234334542574906, |
|
"loss": 1.665, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.7363590005099439, |
|
"grad_norm": 0.31017670035362244, |
|
"learning_rate": 0.00014204971620141647, |
|
"loss": 1.7287, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.7383987761346252, |
|
"grad_norm": 0.34222882986068726, |
|
"learning_rate": 0.00014175564589545854, |
|
"loss": 1.7475, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.7404385517593065, |
|
"grad_norm": 0.2956671714782715, |
|
"learning_rate": 0.00014146113759253362, |
|
"loss": 1.757, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.7424783273839878, |
|
"grad_norm": 0.35265278816223145, |
|
"learning_rate": 0.0001411661943818944, |
|
"loss": 1.6731, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.744518103008669, |
|
"grad_norm": 0.3606823682785034, |
|
"learning_rate": 0.00014087081935735564, |
|
"loss": 1.8684, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.7465578786333503, |
|
"grad_norm": 0.27310827374458313, |
|
"learning_rate": 0.00014057501561726157, |
|
"loss": 1.7656, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.7485976542580316, |
|
"grad_norm": 0.3064682185649872, |
|
"learning_rate": 0.0001402787862644534, |
|
"loss": 1.7646, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.7506374298827129, |
|
"grad_norm": 0.40648531913757324, |
|
"learning_rate": 0.0001399821344062369, |
|
"loss": 1.672, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.7526772055073941, |
|
"grad_norm": 0.28727683424949646, |
|
"learning_rate": 0.00013968506315434974, |
|
"loss": 1.5766, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.7547169811320755, |
|
"grad_norm": 0.30638012290000916, |
|
"learning_rate": 0.00013938757562492873, |
|
"loss": 1.6456, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.7567567567567568, |
|
"grad_norm": 0.3163774311542511, |
|
"learning_rate": 0.0001390896749384773, |
|
"loss": 1.8618, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.7587965323814381, |
|
"grad_norm": 0.36798760294914246, |
|
"learning_rate": 0.00013879136421983266, |
|
"loss": 2.1013, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.7608363080061193, |
|
"grad_norm": 0.30383849143981934, |
|
"learning_rate": 0.00013849264659813312, |
|
"loss": 1.7117, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.7628760836308006, |
|
"grad_norm": 0.35814371705055237, |
|
"learning_rate": 0.0001381935252067852, |
|
"loss": 1.9741, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.7649158592554819, |
|
"grad_norm": 0.29710692167282104, |
|
"learning_rate": 0.00013789400318343068, |
|
"loss": 1.5538, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.7669556348801632, |
|
"grad_norm": 0.30160966515541077, |
|
"learning_rate": 0.0001375940836699139, |
|
"loss": 1.7604, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.7689954105048444, |
|
"grad_norm": 0.28164952993392944, |
|
"learning_rate": 0.0001372937698122487, |
|
"loss": 1.736, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.7710351861295257, |
|
"grad_norm": 0.293729692697525, |
|
"learning_rate": 0.0001369930647605852, |
|
"loss": 1.9099, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.7730749617542071, |
|
"grad_norm": 0.28284940123558044, |
|
"learning_rate": 0.00013669197166917723, |
|
"loss": 1.7251, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.7751147373788884, |
|
"grad_norm": 0.26181384921073914, |
|
"learning_rate": 0.00013639049369634876, |
|
"loss": 1.6381, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.7771545130035696, |
|
"grad_norm": 0.3301055431365967, |
|
"learning_rate": 0.00013608863400446113, |
|
"loss": 1.998, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.7791942886282509, |
|
"grad_norm": 0.3116731643676758, |
|
"learning_rate": 0.00013578639575987958, |
|
"loss": 1.8277, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.7812340642529322, |
|
"grad_norm": 0.30362340807914734, |
|
"learning_rate": 0.0001354837821329404, |
|
"loss": 1.888, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.7832738398776135, |
|
"grad_norm": 0.29050615429878235, |
|
"learning_rate": 0.00013518079629791724, |
|
"loss": 1.7116, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.7853136155022947, |
|
"grad_norm": 0.29679572582244873, |
|
"learning_rate": 0.00013487744143298822, |
|
"loss": 1.6066, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.787353391126976, |
|
"grad_norm": 0.3293478488922119, |
|
"learning_rate": 0.0001345737207202023, |
|
"loss": 1.7222, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.7893931667516573, |
|
"grad_norm": 0.34148016571998596, |
|
"learning_rate": 0.000134269637345446, |
|
"loss": 1.7243, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.7914329423763387, |
|
"grad_norm": 0.31480157375335693, |
|
"learning_rate": 0.00013396519449841005, |
|
"loss": 1.8812, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7934727180010199, |
|
"grad_norm": 0.2998044192790985, |
|
"learning_rate": 0.0001336603953725559, |
|
"loss": 1.9205, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7955124936257012, |
|
"grad_norm": 0.33707278966903687, |
|
"learning_rate": 0.00013335524316508208, |
|
"loss": 1.9246, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7975522692503825, |
|
"grad_norm": 0.33658990263938904, |
|
"learning_rate": 0.00013304974107689087, |
|
"loss": 1.849, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7995920448750637, |
|
"grad_norm": 0.2800214886665344, |
|
"learning_rate": 0.00013274389231255466, |
|
"loss": 1.7607, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.801631820499745, |
|
"grad_norm": 0.29528602957725525, |
|
"learning_rate": 0.00013243770008028224, |
|
"loss": 1.6146, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.8036715961244263, |
|
"grad_norm": 0.316123902797699, |
|
"learning_rate": 0.00013213116759188523, |
|
"loss": 1.7578, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.8057113717491076, |
|
"grad_norm": 0.339609295129776, |
|
"learning_rate": 0.0001318242980627444, |
|
"loss": 1.6968, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8077511473737888, |
|
"grad_norm": 0.29195457696914673, |
|
"learning_rate": 0.00013151709471177588, |
|
"loss": 1.6553, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.8097909229984702, |
|
"grad_norm": 0.3093162477016449, |
|
"learning_rate": 0.00013120956076139746, |
|
"loss": 1.6833, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.8118306986231515, |
|
"grad_norm": 0.343049019575119, |
|
"learning_rate": 0.00013090169943749476, |
|
"loss": 1.717, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.8138704742478328, |
|
"grad_norm": 0.30858704447746277, |
|
"learning_rate": 0.0001305935139693874, |
|
"loss": 1.5495, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.815910249872514, |
|
"grad_norm": 0.28386467695236206, |
|
"learning_rate": 0.00013028500758979506, |
|
"loss": 1.8721, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8179500254971953, |
|
"grad_norm": 0.3261226713657379, |
|
"learning_rate": 0.00012997618353480377, |
|
"loss": 1.7956, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.8199898011218766, |
|
"grad_norm": 0.3289451003074646, |
|
"learning_rate": 0.00012966704504383168, |
|
"loss": 1.7322, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.8220295767465579, |
|
"grad_norm": 0.3098496198654175, |
|
"learning_rate": 0.00012935759535959528, |
|
"loss": 1.7224, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.8240693523712391, |
|
"grad_norm": 0.2830435335636139, |
|
"learning_rate": 0.00012904783772807533, |
|
"loss": 1.7597, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.8261091279959204, |
|
"grad_norm": 0.3040255010128021, |
|
"learning_rate": 0.00012873777539848283, |
|
"loss": 1.8111, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.8281489036206018, |
|
"grad_norm": 0.30722421407699585, |
|
"learning_rate": 0.00012842741162322487, |
|
"loss": 1.6308, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.8301886792452831, |
|
"grad_norm": 0.28365135192871094, |
|
"learning_rate": 0.00012811674965787056, |
|
"loss": 1.932, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.8322284548699643, |
|
"grad_norm": 0.26628825068473816, |
|
"learning_rate": 0.00012780579276111702, |
|
"loss": 1.5047, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.8342682304946456, |
|
"grad_norm": 0.3449050486087799, |
|
"learning_rate": 0.00012749454419475487, |
|
"loss": 1.9364, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.8363080061193269, |
|
"grad_norm": 0.2961723208427429, |
|
"learning_rate": 0.0001271830072236343, |
|
"loss": 1.6638, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.8383477817440081, |
|
"grad_norm": 0.2808418273925781, |
|
"learning_rate": 0.00012687118511563075, |
|
"loss": 1.6743, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.8403875573686894, |
|
"grad_norm": 0.30065590143203735, |
|
"learning_rate": 0.0001265590811416105, |
|
"loss": 1.8681, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.8424273329933707, |
|
"grad_norm": 0.311669260263443, |
|
"learning_rate": 0.0001262466985753967, |
|
"loss": 1.7563, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.844467108618052, |
|
"grad_norm": 0.31335267424583435, |
|
"learning_rate": 0.0001259340406937345, |
|
"loss": 1.9591, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.8465068842427333, |
|
"grad_norm": 0.306470662355423, |
|
"learning_rate": 0.00012562111077625722, |
|
"loss": 1.7165, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.8485466598674146, |
|
"grad_norm": 0.278089314699173, |
|
"learning_rate": 0.00012530791210545162, |
|
"loss": 1.7441, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.8505864354920959, |
|
"grad_norm": 0.28886333107948303, |
|
"learning_rate": 0.00012499444796662353, |
|
"loss": 1.7301, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.8526262111167772, |
|
"grad_norm": 0.31261351704597473, |
|
"learning_rate": 0.0001246807216478634, |
|
"loss": 2.062, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.8546659867414584, |
|
"grad_norm": 0.3184990882873535, |
|
"learning_rate": 0.00012436673644001197, |
|
"loss": 2.0039, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.8567057623661397, |
|
"grad_norm": 0.28797486424446106, |
|
"learning_rate": 0.00012405249563662537, |
|
"loss": 1.5982, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.858745537990821, |
|
"grad_norm": 0.32951340079307556, |
|
"learning_rate": 0.00012373800253394102, |
|
"loss": 1.6121, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.8607853136155023, |
|
"grad_norm": 0.28868240118026733, |
|
"learning_rate": 0.00012342326043084266, |
|
"loss": 1.7456, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.8628250892401835, |
|
"grad_norm": 0.3051716387271881, |
|
"learning_rate": 0.00012310827262882615, |
|
"loss": 1.7698, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.8648648648648649, |
|
"grad_norm": 0.3326285183429718, |
|
"learning_rate": 0.00012279304243196436, |
|
"loss": 1.7834, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.8669046404895462, |
|
"grad_norm": 0.3435324430465698, |
|
"learning_rate": 0.00012247757314687297, |
|
"loss": 1.7208, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.8689444161142275, |
|
"grad_norm": 0.35181501507759094, |
|
"learning_rate": 0.00012216186808267546, |
|
"loss": 1.7401, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.8709841917389087, |
|
"grad_norm": 0.30371034145355225, |
|
"learning_rate": 0.00012184593055096854, |
|
"loss": 1.655, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.87302396736359, |
|
"grad_norm": 0.29926905035972595, |
|
"learning_rate": 0.0001215297638657875, |
|
"loss": 1.686, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.8750637429882713, |
|
"grad_norm": 0.32623806595802307, |
|
"learning_rate": 0.0001212133713435712, |
|
"loss": 1.6556, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.8771035186129525, |
|
"grad_norm": 0.32232004404067993, |
|
"learning_rate": 0.00012089675630312754, |
|
"loss": 1.7736, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.8791432942376338, |
|
"grad_norm": 0.3392092287540436, |
|
"learning_rate": 0.00012057992206559837, |
|
"loss": 1.8754, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.8811830698623151, |
|
"grad_norm": 0.34549203515052795, |
|
"learning_rate": 0.00012026287195442503, |
|
"loss": 1.7624, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.8832228454869965, |
|
"grad_norm": 0.3612309694290161, |
|
"learning_rate": 0.00011994560929531309, |
|
"loss": 1.6408, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.8852626211116777, |
|
"grad_norm": 0.30541735887527466, |
|
"learning_rate": 0.00011962813741619777, |
|
"loss": 1.6785, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.887302396736359, |
|
"grad_norm": 0.2894171476364136, |
|
"learning_rate": 0.00011931045964720881, |
|
"loss": 1.6609, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.8893421723610403, |
|
"grad_norm": 0.31241005659103394, |
|
"learning_rate": 0.0001189925793206357, |
|
"loss": 1.7421, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.8913819479857216, |
|
"grad_norm": 0.36774688959121704, |
|
"learning_rate": 0.00011867449977089265, |
|
"loss": 1.902, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.8934217236104028, |
|
"grad_norm": 0.3658794164657593, |
|
"learning_rate": 0.00011835622433448361, |
|
"loss": 1.9666, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.8954614992350841, |
|
"grad_norm": 0.33153945207595825, |
|
"learning_rate": 0.00011803775634996734, |
|
"loss": 1.6772, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.8975012748597654, |
|
"grad_norm": 0.34272125363349915, |
|
"learning_rate": 0.0001177190991579223, |
|
"loss": 1.7911, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.8995410504844467, |
|
"grad_norm": 0.3852793276309967, |
|
"learning_rate": 0.00011740025610091159, |
|
"loss": 1.7358, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.901580826109128, |
|
"grad_norm": 0.33693379163742065, |
|
"learning_rate": 0.00011708123052344804, |
|
"loss": 1.7078, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.9036206017338093, |
|
"grad_norm": 0.35405832529067993, |
|
"learning_rate": 0.00011676202577195901, |
|
"loss": 1.6454, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.9056603773584906, |
|
"grad_norm": 0.38638654351234436, |
|
"learning_rate": 0.0001164426451947513, |
|
"loss": 1.7611, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.9077001529831719, |
|
"grad_norm": 0.3415047824382782, |
|
"learning_rate": 0.00011612309214197599, |
|
"loss": 1.6206, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.9097399286078531, |
|
"grad_norm": 0.3022047281265259, |
|
"learning_rate": 0.00011580336996559343, |
|
"loss": 1.8017, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.9117797042325344, |
|
"grad_norm": 0.3650202751159668, |
|
"learning_rate": 0.00011548348201933798, |
|
"loss": 1.6507, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.9138194798572157, |
|
"grad_norm": 0.44297119975090027, |
|
"learning_rate": 0.00011516343165868279, |
|
"loss": 1.8607, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.915859255481897, |
|
"grad_norm": 0.3508884906768799, |
|
"learning_rate": 0.00011484322224080472, |
|
"loss": 1.6549, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.9178990311065782, |
|
"grad_norm": 0.31238260865211487, |
|
"learning_rate": 0.00011452285712454904, |
|
"loss": 1.603, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.9199388067312596, |
|
"grad_norm": 0.36678579449653625, |
|
"learning_rate": 0.00011420233967039422, |
|
"loss": 1.7984, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.9219785823559409, |
|
"grad_norm": 0.3467310965061188, |
|
"learning_rate": 0.00011388167324041669, |
|
"loss": 1.7691, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.9240183579806222, |
|
"grad_norm": 0.2987074553966522, |
|
"learning_rate": 0.00011356086119825553, |
|
"loss": 1.617, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.9260581336053034, |
|
"grad_norm": 0.2808781862258911, |
|
"learning_rate": 0.00011323990690907733, |
|
"loss": 1.6362, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.9280979092299847, |
|
"grad_norm": 0.356692910194397, |
|
"learning_rate": 0.00011291881373954065, |
|
"loss": 1.7279, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.930137684854666, |
|
"grad_norm": 0.3478505611419678, |
|
"learning_rate": 0.00011259758505776092, |
|
"loss": 1.6011, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.9321774604793472, |
|
"grad_norm": 0.3229358196258545, |
|
"learning_rate": 0.00011227622423327502, |
|
"loss": 1.7874, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.9342172361040285, |
|
"grad_norm": 0.30234581232070923, |
|
"learning_rate": 0.0001119547346370059, |
|
"loss": 1.6098, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.9362570117287098, |
|
"grad_norm": 0.356154203414917, |
|
"learning_rate": 0.00011163311964122734, |
|
"loss": 1.8164, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.9382967873533912, |
|
"grad_norm": 0.35306453704833984, |
|
"learning_rate": 0.00011131138261952845, |
|
"loss": 1.8833, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.9403365629780724, |
|
"grad_norm": 0.29947206377983093, |
|
"learning_rate": 0.00011098952694677829, |
|
"loss": 1.6675, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.9423763386027537, |
|
"grad_norm": 0.3290257155895233, |
|
"learning_rate": 0.00011066755599909064, |
|
"loss": 1.7463, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.944416114227435, |
|
"grad_norm": 0.32332712411880493, |
|
"learning_rate": 0.00011034547315378838, |
|
"loss": 1.5748, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.9464558898521163, |
|
"grad_norm": 0.40129172801971436, |
|
"learning_rate": 0.00011002328178936811, |
|
"loss": 1.6484, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.9484956654767975, |
|
"grad_norm": 0.34289342164993286, |
|
"learning_rate": 0.00010970098528546481, |
|
"loss": 1.7533, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.9505354411014788, |
|
"grad_norm": 0.30435630679130554, |
|
"learning_rate": 0.00010937858702281631, |
|
"loss": 1.8037, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.9525752167261601, |
|
"grad_norm": 0.29155558347702026, |
|
"learning_rate": 0.00010905609038322779, |
|
"loss": 1.5628, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.9546149923508415, |
|
"grad_norm": 0.31822967529296875, |
|
"learning_rate": 0.0001087334987495364, |
|
"loss": 1.7222, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.9566547679755227, |
|
"grad_norm": 0.2868748605251312, |
|
"learning_rate": 0.00010841081550557578, |
|
"loss": 1.86, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.958694543600204, |
|
"grad_norm": 0.3066910207271576, |
|
"learning_rate": 0.00010808804403614043, |
|
"loss": 1.7278, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.9607343192248853, |
|
"grad_norm": 0.28570494055747986, |
|
"learning_rate": 0.00010776518772695034, |
|
"loss": 1.7328, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.9627740948495666, |
|
"grad_norm": 0.28472283482551575, |
|
"learning_rate": 0.0001074422499646154, |
|
"loss": 1.7311, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.9648138704742478, |
|
"grad_norm": 0.30987489223480225, |
|
"learning_rate": 0.00010711923413659995, |
|
"loss": 1.7628, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.9668536460989291, |
|
"grad_norm": 0.32321545481681824, |
|
"learning_rate": 0.00010679614363118717, |
|
"loss": 1.8949, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.9688934217236104, |
|
"grad_norm": 0.32413583993911743, |
|
"learning_rate": 0.00010647298183744359, |
|
"loss": 1.6379, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.9709331973482916, |
|
"grad_norm": 0.3008846044540405, |
|
"learning_rate": 0.0001061497521451835, |
|
"loss": 1.7546, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.972972972972973, |
|
"grad_norm": 0.3248610198497772, |
|
"learning_rate": 0.00010582645794493337, |
|
"loss": 1.8915, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.9750127485976543, |
|
"grad_norm": 0.3182532489299774, |
|
"learning_rate": 0.00010550310262789649, |
|
"loss": 1.8771, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.9770525242223356, |
|
"grad_norm": 0.317730188369751, |
|
"learning_rate": 0.00010517968958591705, |
|
"loss": 1.7348, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.9790922998470168, |
|
"grad_norm": 0.3585616648197174, |
|
"learning_rate": 0.00010485622221144484, |
|
"loss": 1.6916, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.9811320754716981, |
|
"grad_norm": 0.2927946150302887, |
|
"learning_rate": 0.00010453270389749957, |
|
"loss": 1.7284, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.9831718510963794, |
|
"grad_norm": 0.2869007885456085, |
|
"learning_rate": 0.00010420913803763521, |
|
"loss": 1.7312, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.9852116267210607, |
|
"grad_norm": 0.34274759888648987, |
|
"learning_rate": 0.00010388552802590462, |
|
"loss": 1.6405, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.9872514023457419, |
|
"grad_norm": 0.3203391134738922, |
|
"learning_rate": 0.00010356187725682359, |
|
"loss": 1.666, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.9892911779704232, |
|
"grad_norm": 0.3370297849178314, |
|
"learning_rate": 0.00010323818912533561, |
|
"loss": 1.757, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.9913309535951046, |
|
"grad_norm": 0.3095087707042694, |
|
"learning_rate": 0.00010291446702677599, |
|
"loss": 1.7015, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.9933707292197859, |
|
"grad_norm": 0.33249640464782715, |
|
"learning_rate": 0.00010259071435683636, |
|
"loss": 1.9363, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.9954105048444671, |
|
"grad_norm": 0.3441324830055237, |
|
"learning_rate": 0.000102266934511529, |
|
"loss": 1.5975, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.9974502804691484, |
|
"grad_norm": 0.31439101696014404, |
|
"learning_rate": 0.00010194313088715135, |
|
"loss": 1.7025, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.9994900560938297, |
|
"grad_norm": 0.34762027859687805, |
|
"learning_rate": 0.00010161930688025017, |
|
"loss": 1.7399, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.9994900560938297, |
|
"eval_loss": 1.752766489982605, |
|
"eval_runtime": 57.5155, |
|
"eval_samples_per_second": 14.361, |
|
"eval_steps_per_second": 1.808, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.001529831718511, |
|
"grad_norm": 0.31156283617019653, |
|
"learning_rate": 0.00010129546588758605, |
|
"loss": 1.6866, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.0035696073431923, |
|
"grad_norm": 0.31161120533943176, |
|
"learning_rate": 0.00010097161130609773, |
|
"loss": 1.6071, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.0056093829678736, |
|
"grad_norm": 0.3198174834251404, |
|
"learning_rate": 0.00010064774653286661, |
|
"loss": 1.5485, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.0076491585925549, |
|
"grad_norm": 0.30601778626441956, |
|
"learning_rate": 0.00010032387496508089, |
|
"loss": 1.7225, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.0096889342172362, |
|
"grad_norm": 0.3010168969631195, |
|
"learning_rate": 0.0001, |
|
"loss": 1.5471, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.0117287098419174, |
|
"grad_norm": 0.33606722950935364, |
|
"learning_rate": 9.967612503491914e-05, |
|
"loss": 1.941, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.0137684854665987, |
|
"grad_norm": 0.3398125469684601, |
|
"learning_rate": 9.935225346713341e-05, |
|
"loss": 1.6347, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.01580826109128, |
|
"grad_norm": 0.3129066228866577, |
|
"learning_rate": 9.902838869390229e-05, |
|
"loss": 1.7006, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.0178480367159612, |
|
"grad_norm": 0.31506016850471497, |
|
"learning_rate": 9.870453411241399e-05, |
|
"loss": 1.6346, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.0198878123406425, |
|
"grad_norm": 0.3370596766471863, |
|
"learning_rate": 9.838069311974986e-05, |
|
"loss": 1.5556, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.0219275879653238, |
|
"grad_norm": 0.34100475907325745, |
|
"learning_rate": 9.805686911284868e-05, |
|
"loss": 1.5844, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.023967363590005, |
|
"grad_norm": 0.3352638781070709, |
|
"learning_rate": 9.7733065488471e-05, |
|
"loss": 1.7705, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.0260071392146863, |
|
"grad_norm": 0.29297593235969543, |
|
"learning_rate": 9.740928564316368e-05, |
|
"loss": 1.5959, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.0280469148393676, |
|
"grad_norm": 0.30612438917160034, |
|
"learning_rate": 9.708553297322406e-05, |
|
"loss": 1.5619, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.0300866904640489, |
|
"grad_norm": 0.32929760217666626, |
|
"learning_rate": 9.676181087466444e-05, |
|
"loss": 1.6273, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.0321264660887302, |
|
"grad_norm": 0.3363798260688782, |
|
"learning_rate": 9.643812274317644e-05, |
|
"loss": 1.5784, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.0341662417134114, |
|
"grad_norm": 0.3519752025604248, |
|
"learning_rate": 9.611447197409543e-05, |
|
"loss": 1.6152, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.0362060173380927, |
|
"grad_norm": 0.2884860038757324, |
|
"learning_rate": 9.579086196236482e-05, |
|
"loss": 1.6408, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.0382457929627742, |
|
"grad_norm": 0.30348899960517883, |
|
"learning_rate": 9.54672961025005e-05, |
|
"loss": 1.6188, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.0402855685874555, |
|
"grad_norm": 0.3200111985206604, |
|
"learning_rate": 9.514377778855521e-05, |
|
"loss": 1.8469, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.0423253442121367, |
|
"grad_norm": 0.3585512340068817, |
|
"learning_rate": 9.482031041408296e-05, |
|
"loss": 1.5236, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.044365119836818, |
|
"grad_norm": 0.33769890666007996, |
|
"learning_rate": 9.449689737210352e-05, |
|
"loss": 1.5718, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.0464048954614993, |
|
"grad_norm": 0.3171827793121338, |
|
"learning_rate": 9.417354205506663e-05, |
|
"loss": 1.6292, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.0484446710861806, |
|
"grad_norm": 0.30314505100250244, |
|
"learning_rate": 9.385024785481654e-05, |
|
"loss": 1.7828, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.0504844467108618, |
|
"grad_norm": 0.3583092987537384, |
|
"learning_rate": 9.352701816255643e-05, |
|
"loss": 1.7287, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.052524222335543, |
|
"grad_norm": 0.3360598087310791, |
|
"learning_rate": 9.320385636881283e-05, |
|
"loss": 1.5975, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.0545639979602244, |
|
"grad_norm": 0.31122949719429016, |
|
"learning_rate": 9.288076586340006e-05, |
|
"loss": 1.64, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.0566037735849056, |
|
"grad_norm": 0.32964256405830383, |
|
"learning_rate": 9.255775003538462e-05, |
|
"loss": 1.6612, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.058643549209587, |
|
"grad_norm": 0.3330499529838562, |
|
"learning_rate": 9.223481227304968e-05, |
|
"loss": 1.4896, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.0606833248342682, |
|
"grad_norm": 0.3136623799800873, |
|
"learning_rate": 9.19119559638596e-05, |
|
"loss": 1.4403, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.0627231004589495, |
|
"grad_norm": 0.305698037147522, |
|
"learning_rate": 9.158918449442423e-05, |
|
"loss": 1.6269, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.0647628760836307, |
|
"grad_norm": 0.311132550239563, |
|
"learning_rate": 9.126650125046361e-05, |
|
"loss": 1.5817, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.066802651708312, |
|
"grad_norm": 0.32102203369140625, |
|
"learning_rate": 9.094390961677223e-05, |
|
"loss": 1.6621, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.0688424273329933, |
|
"grad_norm": 0.29032936692237854, |
|
"learning_rate": 9.062141297718371e-05, |
|
"loss": 1.6658, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.0708822029576746, |
|
"grad_norm": 0.3275803327560425, |
|
"learning_rate": 9.02990147145352e-05, |
|
"loss": 1.7366, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.072921978582356, |
|
"grad_norm": 0.32101571559906006, |
|
"learning_rate": 8.997671821063191e-05, |
|
"loss": 1.4678, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.0749617542070373, |
|
"grad_norm": 0.34086906909942627, |
|
"learning_rate": 8.965452684621164e-05, |
|
"loss": 1.851, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.0770015298317186, |
|
"grad_norm": 0.3514576554298401, |
|
"learning_rate": 8.933244400090937e-05, |
|
"loss": 1.5987, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.0790413054563999, |
|
"grad_norm": 0.37968167662620544, |
|
"learning_rate": 8.901047305322172e-05, |
|
"loss": 1.7313, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.0810810810810811, |
|
"grad_norm": 0.31161201000213623, |
|
"learning_rate": 8.868861738047158e-05, |
|
"loss": 1.4166, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.0831208567057624, |
|
"grad_norm": 0.2926504909992218, |
|
"learning_rate": 8.836688035877267e-05, |
|
"loss": 1.5615, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.0851606323304437, |
|
"grad_norm": 0.3290373980998993, |
|
"learning_rate": 8.804526536299413e-05, |
|
"loss": 1.5896, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.087200407955125, |
|
"grad_norm": 0.36070388555526733, |
|
"learning_rate": 8.772377576672502e-05, |
|
"loss": 1.6017, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.0892401835798062, |
|
"grad_norm": 0.29602184891700745, |
|
"learning_rate": 8.740241494223911e-05, |
|
"loss": 1.5118, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.0912799592044875, |
|
"grad_norm": 0.3364042341709137, |
|
"learning_rate": 8.70811862604594e-05, |
|
"loss": 1.5733, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.0933197348291688, |
|
"grad_norm": 0.35308852791786194, |
|
"learning_rate": 8.676009309092272e-05, |
|
"loss": 1.5998, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.09535951045385, |
|
"grad_norm": 0.3395770788192749, |
|
"learning_rate": 8.643913880174448e-05, |
|
"loss": 1.6643, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.0973992860785313, |
|
"grad_norm": 0.3173401653766632, |
|
"learning_rate": 8.611832675958336e-05, |
|
"loss": 1.5605, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.0994390617032126, |
|
"grad_norm": 0.3106042444705963, |
|
"learning_rate": 8.579766032960582e-05, |
|
"loss": 1.5107, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.1014788373278939, |
|
"grad_norm": 0.3313981890678406, |
|
"learning_rate": 8.5477142875451e-05, |
|
"loss": 1.5485, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.1035186129525751, |
|
"grad_norm": 0.36504775285720825, |
|
"learning_rate": 8.515677775919527e-05, |
|
"loss": 1.6587, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.1055583885772564, |
|
"grad_norm": 0.325743705034256, |
|
"learning_rate": 8.48365683413172e-05, |
|
"loss": 1.5985, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.1075981642019377, |
|
"grad_norm": 0.3341684341430664, |
|
"learning_rate": 8.451651798066203e-05, |
|
"loss": 1.5058, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.109637939826619, |
|
"grad_norm": 0.3337576389312744, |
|
"learning_rate": 8.419663003440657e-05, |
|
"loss": 1.6246, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.1116777154513005, |
|
"grad_norm": 0.3710947334766388, |
|
"learning_rate": 8.387690785802402e-05, |
|
"loss": 1.6237, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.1137174910759817, |
|
"grad_norm": 0.3338005840778351, |
|
"learning_rate": 8.355735480524874e-05, |
|
"loss": 1.6597, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.115757266700663, |
|
"grad_norm": 0.37434127926826477, |
|
"learning_rate": 8.323797422804099e-05, |
|
"loss": 1.5366, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.1177970423253443, |
|
"grad_norm": 0.31900352239608765, |
|
"learning_rate": 8.291876947655196e-05, |
|
"loss": 1.8195, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.1198368179500255, |
|
"grad_norm": 0.34681376814842224, |
|
"learning_rate": 8.259974389908842e-05, |
|
"loss": 1.6413, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.1218765935747068, |
|
"grad_norm": 0.3127409517765045, |
|
"learning_rate": 8.228090084207774e-05, |
|
"loss": 1.7019, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.123916369199388, |
|
"grad_norm": 0.3269082307815552, |
|
"learning_rate": 8.196224365003267e-05, |
|
"loss": 1.6642, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.1259561448240694, |
|
"grad_norm": 0.3357929289340973, |
|
"learning_rate": 8.16437756655164e-05, |
|
"loss": 1.515, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.1279959204487506, |
|
"grad_norm": 0.3477891683578491, |
|
"learning_rate": 8.132550022910737e-05, |
|
"loss": 1.5411, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.130035696073432, |
|
"grad_norm": 0.3414022624492645, |
|
"learning_rate": 8.100742067936431e-05, |
|
"loss": 1.5473, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.1320754716981132, |
|
"grad_norm": 0.3490772247314453, |
|
"learning_rate": 8.068954035279121e-05, |
|
"loss": 1.6757, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.1341152473227945, |
|
"grad_norm": 0.3200000524520874, |
|
"learning_rate": 8.037186258380226e-05, |
|
"loss": 1.5958, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.1361550229474757, |
|
"grad_norm": 0.336568146944046, |
|
"learning_rate": 8.005439070468692e-05, |
|
"loss": 1.7993, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.138194798572157, |
|
"grad_norm": 0.30839619040489197, |
|
"learning_rate": 7.973712804557501e-05, |
|
"loss": 1.6876, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.1402345741968383, |
|
"grad_norm": 0.34785008430480957, |
|
"learning_rate": 7.942007793440164e-05, |
|
"loss": 1.5183, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.1422743498215195, |
|
"grad_norm": 0.32502883672714233, |
|
"learning_rate": 7.91032436968725e-05, |
|
"loss": 1.6198, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.144314125446201, |
|
"grad_norm": 0.34975382685661316, |
|
"learning_rate": 7.878662865642881e-05, |
|
"loss": 1.8394, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.1463539010708823, |
|
"grad_norm": 0.33375632762908936, |
|
"learning_rate": 7.847023613421251e-05, |
|
"loss": 1.5781, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.1483936766955636, |
|
"grad_norm": 0.3376395106315613, |
|
"learning_rate": 7.815406944903147e-05, |
|
"loss": 1.6032, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.1504334523202449, |
|
"grad_norm": 0.3409046530723572, |
|
"learning_rate": 7.78381319173246e-05, |
|
"loss": 1.6174, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.1524732279449261, |
|
"grad_norm": 0.3230804204940796, |
|
"learning_rate": 7.75224268531271e-05, |
|
"loss": 1.5616, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.1545130035696074, |
|
"grad_norm": 0.3072092831134796, |
|
"learning_rate": 7.72069575680357e-05, |
|
"loss": 1.6302, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.1565527791942887, |
|
"grad_norm": 0.32646802067756653, |
|
"learning_rate": 7.689172737117389e-05, |
|
"loss": 1.6233, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.15859255481897, |
|
"grad_norm": 0.3212278187274933, |
|
"learning_rate": 7.657673956915735e-05, |
|
"loss": 1.5473, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.1606323304436512, |
|
"grad_norm": 0.3245629668235779, |
|
"learning_rate": 7.626199746605903e-05, |
|
"loss": 1.5801, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.1626721060683325, |
|
"grad_norm": 0.35144391655921936, |
|
"learning_rate": 7.594750436337467e-05, |
|
"loss": 1.4983, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.1647118816930138, |
|
"grad_norm": 0.3057266175746918, |
|
"learning_rate": 7.563326355998803e-05, |
|
"loss": 1.6002, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.166751657317695, |
|
"grad_norm": 0.33646106719970703, |
|
"learning_rate": 7.531927835213656e-05, |
|
"loss": 1.5237, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.1687914329423763, |
|
"grad_norm": 0.31540170311927795, |
|
"learning_rate": 7.500555203337647e-05, |
|
"loss": 1.6638, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.1708312085670576, |
|
"grad_norm": 0.35494086146354675, |
|
"learning_rate": 7.469208789454838e-05, |
|
"loss": 1.6821, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.1728709841917389, |
|
"grad_norm": 0.3381110727787018, |
|
"learning_rate": 7.437888922374276e-05, |
|
"loss": 1.7844, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.1749107598164201, |
|
"grad_norm": 0.33230093121528625, |
|
"learning_rate": 7.40659593062655e-05, |
|
"loss": 1.669, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.1769505354411014, |
|
"grad_norm": 0.3445635735988617, |
|
"learning_rate": 7.37533014246033e-05, |
|
"loss": 1.6158, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.1789903110657827, |
|
"grad_norm": 0.310813307762146, |
|
"learning_rate": 7.344091885838948e-05, |
|
"loss": 1.6848, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.181030086690464, |
|
"grad_norm": 0.3245631754398346, |
|
"learning_rate": 7.312881488436927e-05, |
|
"loss": 1.6653, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.1830698623151452, |
|
"grad_norm": 0.33641543984413147, |
|
"learning_rate": 7.281699277636572e-05, |
|
"loss": 1.6324, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.1851096379398267, |
|
"grad_norm": 0.3748520016670227, |
|
"learning_rate": 7.250545580524515e-05, |
|
"loss": 1.7024, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.187149413564508, |
|
"grad_norm": 0.3528485894203186, |
|
"learning_rate": 7.2194207238883e-05, |
|
"loss": 1.6821, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.1891891891891893, |
|
"grad_norm": 0.3142973780632019, |
|
"learning_rate": 7.188325034212943e-05, |
|
"loss": 1.7549, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.1912289648138705, |
|
"grad_norm": 0.33410054445266724, |
|
"learning_rate": 7.157258837677514e-05, |
|
"loss": 1.8369, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.1932687404385518, |
|
"grad_norm": 0.3428086042404175, |
|
"learning_rate": 7.126222460151719e-05, |
|
"loss": 1.603, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.195308516063233, |
|
"grad_norm": 0.3166033625602722, |
|
"learning_rate": 7.095216227192467e-05, |
|
"loss": 1.6375, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.1973482916879143, |
|
"grad_norm": 0.29628151655197144, |
|
"learning_rate": 7.064240464040473e-05, |
|
"loss": 1.3848, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.1993880673125956, |
|
"grad_norm": 0.3217509090900421, |
|
"learning_rate": 7.033295495616834e-05, |
|
"loss": 1.5988, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.201427842937277, |
|
"grad_norm": 0.3184705078601837, |
|
"learning_rate": 7.002381646519625e-05, |
|
"loss": 1.7149, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.2034676185619582, |
|
"grad_norm": 0.3363349139690399, |
|
"learning_rate": 6.971499241020495e-05, |
|
"loss": 1.6204, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.2055073941866394, |
|
"grad_norm": 0.35250142216682434, |
|
"learning_rate": 6.940648603061263e-05, |
|
"loss": 1.7362, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.2075471698113207, |
|
"grad_norm": 0.32162901759147644, |
|
"learning_rate": 6.909830056250527e-05, |
|
"loss": 1.5443, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.209586945436002, |
|
"grad_norm": 0.3331109881401062, |
|
"learning_rate": 6.879043923860257e-05, |
|
"loss": 1.6554, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.2116267210606833, |
|
"grad_norm": 0.3188980519771576, |
|
"learning_rate": 6.848290528822416e-05, |
|
"loss": 1.657, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.2136664966853645, |
|
"grad_norm": 0.33444222807884216, |
|
"learning_rate": 6.817570193725564e-05, |
|
"loss": 1.5265, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.2157062723100458, |
|
"grad_norm": 0.3025628626346588, |
|
"learning_rate": 6.786883240811479e-05, |
|
"loss": 1.5125, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.2177460479347273, |
|
"grad_norm": 0.3270890712738037, |
|
"learning_rate": 6.756229991971779e-05, |
|
"loss": 1.7311, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.2197858235594086, |
|
"grad_norm": 0.34401583671569824, |
|
"learning_rate": 6.725610768744534e-05, |
|
"loss": 1.6609, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.2218255991840898, |
|
"grad_norm": 0.3244417905807495, |
|
"learning_rate": 6.695025892310914e-05, |
|
"loss": 1.71, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.2238653748087711, |
|
"grad_norm": 0.3126024901866913, |
|
"learning_rate": 6.664475683491796e-05, |
|
"loss": 1.7029, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.2259051504334524, |
|
"grad_norm": 0.3367353677749634, |
|
"learning_rate": 6.633960462744416e-05, |
|
"loss": 1.6139, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.2279449260581337, |
|
"grad_norm": 0.3355925679206848, |
|
"learning_rate": 6.603480550158995e-05, |
|
"loss": 1.5975, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.229984701682815, |
|
"grad_norm": 0.3236278295516968, |
|
"learning_rate": 6.5730362654554e-05, |
|
"loss": 1.5147, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.2320244773074962, |
|
"grad_norm": 0.335346519947052, |
|
"learning_rate": 6.542627927979771e-05, |
|
"loss": 1.4939, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.2340642529321775, |
|
"grad_norm": 0.30104824900627136, |
|
"learning_rate": 6.512255856701177e-05, |
|
"loss": 1.6025, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.2361040285568587, |
|
"grad_norm": 0.32115882635116577, |
|
"learning_rate": 6.481920370208274e-05, |
|
"loss": 1.5471, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.23814380418154, |
|
"grad_norm": 0.33474794030189514, |
|
"learning_rate": 6.451621786705962e-05, |
|
"loss": 1.5468, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.2401835798062213, |
|
"grad_norm": 0.3239012062549591, |
|
"learning_rate": 6.42136042401204e-05, |
|
"loss": 1.8344, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.2422233554309026, |
|
"grad_norm": 0.3667883574962616, |
|
"learning_rate": 6.39113659955389e-05, |
|
"loss": 1.5429, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.2442631310555838, |
|
"grad_norm": 0.32810983061790466, |
|
"learning_rate": 6.360950630365126e-05, |
|
"loss": 1.6717, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.246302906680265, |
|
"grad_norm": 0.3319525420665741, |
|
"learning_rate": 6.330802833082279e-05, |
|
"loss": 1.5021, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.2483426823049464, |
|
"grad_norm": 0.3360360264778137, |
|
"learning_rate": 6.300693523941482e-05, |
|
"loss": 1.464, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.2503824579296277, |
|
"grad_norm": 0.3125949203968048, |
|
"learning_rate": 6.270623018775135e-05, |
|
"loss": 1.5397, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.252422233554309, |
|
"grad_norm": 0.30626389384269714, |
|
"learning_rate": 6.24059163300861e-05, |
|
"loss": 1.791, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.2544620091789902, |
|
"grad_norm": 0.3332500159740448, |
|
"learning_rate": 6.210599681656933e-05, |
|
"loss": 1.5316, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.2565017848036715, |
|
"grad_norm": 0.3248031735420227, |
|
"learning_rate": 6.180647479321485e-05, |
|
"loss": 1.4547, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.2585415604283527, |
|
"grad_norm": 0.29785528779029846, |
|
"learning_rate": 6.15073534018669e-05, |
|
"loss": 1.6137, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.2605813360530342, |
|
"grad_norm": 0.3345397412776947, |
|
"learning_rate": 6.120863578016735e-05, |
|
"loss": 1.6357, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.2626211116777155, |
|
"grad_norm": 0.323854923248291, |
|
"learning_rate": 6.091032506152274e-05, |
|
"loss": 1.5605, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.2646608873023968, |
|
"grad_norm": 0.343605637550354, |
|
"learning_rate": 6.061242437507131e-05, |
|
"loss": 1.6101, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.266700662927078, |
|
"grad_norm": 0.35858821868896484, |
|
"learning_rate": 6.031493684565029e-05, |
|
"loss": 1.6113, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.2687404385517593, |
|
"grad_norm": 0.34025838971138, |
|
"learning_rate": 6.00178655937631e-05, |
|
"loss": 1.4651, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.2707802141764406, |
|
"grad_norm": 0.33670589327812195, |
|
"learning_rate": 5.972121373554664e-05, |
|
"loss": 1.5778, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.2728199898011219, |
|
"grad_norm": 0.33113864064216614, |
|
"learning_rate": 5.942498438273849e-05, |
|
"loss": 1.6172, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.2748597654258031, |
|
"grad_norm": 0.3496561646461487, |
|
"learning_rate": 5.9129180642644414e-05, |
|
"loss": 1.5233, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.2768995410504844, |
|
"grad_norm": 0.35048961639404297, |
|
"learning_rate": 5.883380561810563e-05, |
|
"loss": 1.7357, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.2789393166751657, |
|
"grad_norm": 0.36025702953338623, |
|
"learning_rate": 5.8538862407466425e-05, |
|
"loss": 1.6267, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.280979092299847, |
|
"grad_norm": 0.3353872001171112, |
|
"learning_rate": 5.82443541045415e-05, |
|
"loss": 1.6597, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.2830188679245282, |
|
"grad_norm": 0.3378039002418518, |
|
"learning_rate": 5.795028379858355e-05, |
|
"loss": 1.6192, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.2850586435492095, |
|
"grad_norm": 0.3405285179615021, |
|
"learning_rate": 5.765665457425102e-05, |
|
"loss": 1.6515, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.287098419173891, |
|
"grad_norm": 0.3546642065048218, |
|
"learning_rate": 5.736346951157544e-05, |
|
"loss": 1.6763, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.2891381947985723, |
|
"grad_norm": 0.35471004247665405, |
|
"learning_rate": 5.707073168592942e-05, |
|
"loss": 1.6143, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.2911779704232536, |
|
"grad_norm": 0.34378281235694885, |
|
"learning_rate": 5.677844416799424e-05, |
|
"loss": 1.5564, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.2932177460479348, |
|
"grad_norm": 0.31412938237190247, |
|
"learning_rate": 5.648661002372768e-05, |
|
"loss": 1.523, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.295257521672616, |
|
"grad_norm": 0.4044433534145355, |
|
"learning_rate": 5.6195232314331766e-05, |
|
"loss": 1.8443, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.2972972972972974, |
|
"grad_norm": 0.33771705627441406, |
|
"learning_rate": 5.590431409622081e-05, |
|
"loss": 1.5658, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.2993370729219786, |
|
"grad_norm": 0.38285380601882935, |
|
"learning_rate": 5.56138584209893e-05, |
|
"loss": 1.6984, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.30137684854666, |
|
"grad_norm": 0.34205278754234314, |
|
"learning_rate": 5.532386833537977e-05, |
|
"loss": 1.5203, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.3034166241713412, |
|
"grad_norm": 0.32206758856773376, |
|
"learning_rate": 5.503434688125104e-05, |
|
"loss": 1.5948, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.3054563997960225, |
|
"grad_norm": 0.3362283706665039, |
|
"learning_rate": 5.474529709554612e-05, |
|
"loss": 1.7107, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.3074961754207037, |
|
"grad_norm": 0.3668416142463684, |
|
"learning_rate": 5.445672201026054e-05, |
|
"loss": 1.6136, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.309535951045385, |
|
"grad_norm": 0.35234856605529785, |
|
"learning_rate": 5.416862465241033e-05, |
|
"loss": 1.4468, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.3115757266700663, |
|
"grad_norm": 0.34221404790878296, |
|
"learning_rate": 5.388100804400049e-05, |
|
"loss": 1.6279, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.3136155022947476, |
|
"grad_norm": 0.30184850096702576, |
|
"learning_rate": 5.3593875201993174e-05, |
|
"loss": 1.5824, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.3156552779194288, |
|
"grad_norm": 0.35658925771713257, |
|
"learning_rate": 5.3307229138275936e-05, |
|
"loss": 1.7841, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.31769505354411, |
|
"grad_norm": 0.3517902195453644, |
|
"learning_rate": 5.302107285963045e-05, |
|
"loss": 1.537, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.3197348291687914, |
|
"grad_norm": 0.33700263500213623, |
|
"learning_rate": 5.273540936770058e-05, |
|
"loss": 1.6779, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.3217746047934726, |
|
"grad_norm": 0.37334513664245605, |
|
"learning_rate": 5.245024165896126e-05, |
|
"loss": 1.657, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.323814380418154, |
|
"grad_norm": 0.33209171891212463, |
|
"learning_rate": 5.2165572724686754e-05, |
|
"loss": 1.5466, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.3258541560428352, |
|
"grad_norm": 0.3467157483100891, |
|
"learning_rate": 5.1881405550919493e-05, |
|
"loss": 1.8377, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.3278939316675165, |
|
"grad_norm": 0.3575190305709839, |
|
"learning_rate": 5.1597743118438726e-05, |
|
"loss": 1.6033, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.3299337072921977, |
|
"grad_norm": 0.3690027594566345, |
|
"learning_rate": 5.1314588402729044e-05, |
|
"loss": 1.7233, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.3319734829168792, |
|
"grad_norm": 0.38401731848716736, |
|
"learning_rate": 5.103194437394952e-05, |
|
"loss": 1.5839, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.3340132585415605, |
|
"grad_norm": 0.3264882266521454, |
|
"learning_rate": 5.074981399690218e-05, |
|
"loss": 1.5094, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.3360530341662418, |
|
"grad_norm": 0.32658815383911133, |
|
"learning_rate": 5.0468200231001286e-05, |
|
"loss": 1.775, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.338092809790923, |
|
"grad_norm": 0.3544069230556488, |
|
"learning_rate": 5.018710603024187e-05, |
|
"loss": 1.6619, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.3401325854156043, |
|
"grad_norm": 0.4113689661026001, |
|
"learning_rate": 4.9906534343169144e-05, |
|
"loss": 1.4896, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.3421723610402856, |
|
"grad_norm": 0.3639819324016571, |
|
"learning_rate": 4.962648811284738e-05, |
|
"loss": 1.9133, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.3442121366649669, |
|
"grad_norm": 0.3412756621837616, |
|
"learning_rate": 4.934697027682894e-05, |
|
"loss": 1.4732, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.3462519122896481, |
|
"grad_norm": 0.3357820510864258, |
|
"learning_rate": 4.9067983767123736e-05, |
|
"loss": 1.5121, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.3482916879143294, |
|
"grad_norm": 0.340202271938324, |
|
"learning_rate": 4.8789531510168163e-05, |
|
"loss": 1.4813, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.3503314635390107, |
|
"grad_norm": 0.3804778754711151, |
|
"learning_rate": 4.851161642679466e-05, |
|
"loss": 1.5581, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.352371239163692, |
|
"grad_norm": 0.3732028603553772, |
|
"learning_rate": 4.8234241432200965e-05, |
|
"loss": 1.6184, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.3544110147883732, |
|
"grad_norm": 0.33145490288734436, |
|
"learning_rate": 4.795740943591955e-05, |
|
"loss": 1.6271, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.3564507904130545, |
|
"grad_norm": 0.34722691774368286, |
|
"learning_rate": 4.768112334178699e-05, |
|
"loss": 1.4174, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.3584905660377358, |
|
"grad_norm": 0.3337624669075012, |
|
"learning_rate": 4.74053860479137e-05, |
|
"loss": 1.5257, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.3605303416624173, |
|
"grad_norm": 0.3500399887561798, |
|
"learning_rate": 4.7130200446653475e-05, |
|
"loss": 1.6007, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.3625701172870985, |
|
"grad_norm": 0.33266812562942505, |
|
"learning_rate": 4.6855569424572955e-05, |
|
"loss": 1.58, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.3646098929117798, |
|
"grad_norm": 0.33294910192489624, |
|
"learning_rate": 4.65814958624217e-05, |
|
"loss": 1.6312, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.366649668536461, |
|
"grad_norm": 0.34834083914756775, |
|
"learning_rate": 4.630798263510162e-05, |
|
"loss": 1.8115, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.3686894441611424, |
|
"grad_norm": 0.350946307182312, |
|
"learning_rate": 4.6035032611637094e-05, |
|
"loss": 1.4841, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.3707292197858236, |
|
"grad_norm": 0.3481386601924896, |
|
"learning_rate": 4.5762648655144666e-05, |
|
"loss": 1.8824, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.372768995410505, |
|
"grad_norm": 0.38010773062705994, |
|
"learning_rate": 4.549083362280317e-05, |
|
"loss": 1.6004, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.3748087710351862, |
|
"grad_norm": 0.34416502714157104, |
|
"learning_rate": 4.5219590365823714e-05, |
|
"loss": 1.6492, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.3768485466598674, |
|
"grad_norm": 0.35469838976860046, |
|
"learning_rate": 4.494892172941965e-05, |
|
"loss": 1.6195, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.3788883222845487, |
|
"grad_norm": 0.3326956033706665, |
|
"learning_rate": 4.467883055277695e-05, |
|
"loss": 1.7463, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.38092809790923, |
|
"grad_norm": 0.3437754511833191, |
|
"learning_rate": 4.440931966902418e-05, |
|
"loss": 1.5278, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.3829678735339113, |
|
"grad_norm": 0.3093564808368683, |
|
"learning_rate": 4.414039190520308e-05, |
|
"loss": 1.6568, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.3850076491585925, |
|
"grad_norm": 0.369819700717926, |
|
"learning_rate": 4.387205008223854e-05, |
|
"loss": 1.6104, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.3870474247832738, |
|
"grad_norm": 0.31055358052253723, |
|
"learning_rate": 4.360429701490934e-05, |
|
"loss": 1.5981, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.389087200407955, |
|
"grad_norm": 0.3369213044643402, |
|
"learning_rate": 4.333713551181852e-05, |
|
"loss": 1.5602, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.3911269760326364, |
|
"grad_norm": 0.3584446907043457, |
|
"learning_rate": 4.307056837536373e-05, |
|
"loss": 1.3992, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.3931667516573176, |
|
"grad_norm": 0.33131203055381775, |
|
"learning_rate": 4.2804598401708175e-05, |
|
"loss": 1.7085, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.395206527281999, |
|
"grad_norm": 0.3502407968044281, |
|
"learning_rate": 4.253922838075095e-05, |
|
"loss": 1.6791, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.3972463029066802, |
|
"grad_norm": 0.3351820409297943, |
|
"learning_rate": 4.227446109609809e-05, |
|
"loss": 1.7071, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.3992860785313614, |
|
"grad_norm": 0.3804665505886078, |
|
"learning_rate": 4.2010299325033034e-05, |
|
"loss": 1.8445, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.4013258541560427, |
|
"grad_norm": 0.326779842376709, |
|
"learning_rate": 4.17467458384878e-05, |
|
"loss": 1.6168, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.403365629780724, |
|
"grad_norm": 0.3746296465396881, |
|
"learning_rate": 4.1483803401013796e-05, |
|
"loss": 1.522, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.4054054054054055, |
|
"grad_norm": 0.3150007724761963, |
|
"learning_rate": 4.12214747707527e-05, |
|
"loss": 1.5968, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.4074451810300868, |
|
"grad_norm": 0.33969178795814514, |
|
"learning_rate": 4.0959762699407766e-05, |
|
"loss": 1.6025, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.409484956654768, |
|
"grad_norm": 0.3613244593143463, |
|
"learning_rate": 4.0698669932214727e-05, |
|
"loss": 1.7332, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.4115247322794493, |
|
"grad_norm": 0.3509519398212433, |
|
"learning_rate": 4.043819920791322e-05, |
|
"loss": 1.5837, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.4135645079041306, |
|
"grad_norm": 0.37852659821510315, |
|
"learning_rate": 4.0178353258717804e-05, |
|
"loss": 1.6182, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.4156042835288118, |
|
"grad_norm": 0.36975494027137756, |
|
"learning_rate": 3.991913481028965e-05, |
|
"loss": 1.5078, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.4176440591534931, |
|
"grad_norm": 0.34122028946876526, |
|
"learning_rate": 3.966054658170754e-05, |
|
"loss": 1.6329, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.4196838347781744, |
|
"grad_norm": 0.3338213562965393, |
|
"learning_rate": 3.940259128543967e-05, |
|
"loss": 1.6355, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.4217236104028557, |
|
"grad_norm": 0.3279430866241455, |
|
"learning_rate": 3.9145271627314986e-05, |
|
"loss": 1.6395, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.423763386027537, |
|
"grad_norm": 0.3165960907936096, |
|
"learning_rate": 3.8888590306494974e-05, |
|
"loss": 1.4444, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.4258031616522182, |
|
"grad_norm": 0.3250195384025574, |
|
"learning_rate": 3.8632550015445256e-05, |
|
"loss": 1.5924, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.4278429372768995, |
|
"grad_norm": 0.3744489550590515, |
|
"learning_rate": 3.8377153439907266e-05, |
|
"loss": 1.6443, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.4298827129015808, |
|
"grad_norm": 0.34341734647750854, |
|
"learning_rate": 3.81224032588703e-05, |
|
"loss": 1.6492, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.4319224885262622, |
|
"grad_norm": 0.343795508146286, |
|
"learning_rate": 3.786830214454315e-05, |
|
"loss": 1.6409, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.4339622641509435, |
|
"grad_norm": 0.3446982800960541, |
|
"learning_rate": 3.7614852762326305e-05, |
|
"loss": 1.6905, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.4360020397756248, |
|
"grad_norm": 0.34508073329925537, |
|
"learning_rate": 3.736205777078381e-05, |
|
"loss": 1.5568, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.438041815400306, |
|
"grad_norm": 0.3523988425731659, |
|
"learning_rate": 3.710991982161555e-05, |
|
"loss": 1.6096, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.4400815910249873, |
|
"grad_norm": 0.3173058032989502, |
|
"learning_rate": 3.6858441559629306e-05, |
|
"loss": 1.507, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.4421213666496686, |
|
"grad_norm": 0.3678998053073883, |
|
"learning_rate": 3.6607625622713e-05, |
|
"loss": 1.5844, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.4441611422743499, |
|
"grad_norm": 0.32919788360595703, |
|
"learning_rate": 3.63574746418072e-05, |
|
"loss": 1.791, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.4462009178990312, |
|
"grad_norm": 0.35295575857162476, |
|
"learning_rate": 3.610799124087725e-05, |
|
"loss": 1.7232, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.4482406935237124, |
|
"grad_norm": 0.31556135416030884, |
|
"learning_rate": 3.585917803688603e-05, |
|
"loss": 1.6823, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.4502804691483937, |
|
"grad_norm": 0.33846956491470337, |
|
"learning_rate": 3.5611037639766265e-05, |
|
"loss": 1.5164, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.452320244773075, |
|
"grad_norm": 0.31997138261795044, |
|
"learning_rate": 3.5363572652393326e-05, |
|
"loss": 1.7178, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.4543600203977562, |
|
"grad_norm": 0.3398173749446869, |
|
"learning_rate": 3.511678567055786e-05, |
|
"loss": 1.6152, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.4563997960224375, |
|
"grad_norm": 0.3304274380207062, |
|
"learning_rate": 3.487067928293848e-05, |
|
"loss": 1.5052, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.4584395716471188, |
|
"grad_norm": 0.325420618057251, |
|
"learning_rate": 3.4625256071074773e-05, |
|
"loss": 1.4226, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.4604793472718, |
|
"grad_norm": 0.31153547763824463, |
|
"learning_rate": 3.4380518609340076e-05, |
|
"loss": 1.4929, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.4625191228964813, |
|
"grad_norm": 0.32968971133232117, |
|
"learning_rate": 3.4136469464914575e-05, |
|
"loss": 1.3787, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.4645588985211626, |
|
"grad_norm": 0.35368427634239197, |
|
"learning_rate": 3.389311119775828e-05, |
|
"loss": 1.8215, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.4665986741458439, |
|
"grad_norm": 0.3066471219062805, |
|
"learning_rate": 3.3650446360584275e-05, |
|
"loss": 1.6172, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.4686384497705252, |
|
"grad_norm": 0.31443992257118225, |
|
"learning_rate": 3.340847749883191e-05, |
|
"loss": 1.5386, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.4706782253952064, |
|
"grad_norm": 0.3136918246746063, |
|
"learning_rate": 3.316720715064e-05, |
|
"loss": 1.5974, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.4727180010198877, |
|
"grad_norm": 0.3218241333961487, |
|
"learning_rate": 3.292663784682036e-05, |
|
"loss": 1.8249, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.474757776644569, |
|
"grad_norm": 0.31746232509613037, |
|
"learning_rate": 3.268677211083109e-05, |
|
"loss": 1.6415, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.4767975522692502, |
|
"grad_norm": 0.35141000151634216, |
|
"learning_rate": 3.2447612458750365e-05, |
|
"loss": 1.6132, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.4788373278939317, |
|
"grad_norm": 0.32248514890670776, |
|
"learning_rate": 3.2209161399249674e-05, |
|
"loss": 1.6643, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.480877103518613, |
|
"grad_norm": 0.3245083689689636, |
|
"learning_rate": 3.197142143356787e-05, |
|
"loss": 1.5616, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.4829168791432943, |
|
"grad_norm": 0.33084774017333984, |
|
"learning_rate": 3.173439505548462e-05, |
|
"loss": 1.6546, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.4849566547679756, |
|
"grad_norm": 0.3269692063331604, |
|
"learning_rate": 3.149808475129452e-05, |
|
"loss": 1.4821, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.4869964303926568, |
|
"grad_norm": 0.3100489675998688, |
|
"learning_rate": 3.126249299978086e-05, |
|
"loss": 1.475, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.489036206017338, |
|
"grad_norm": 0.3064831495285034, |
|
"learning_rate": 3.102762227218957e-05, |
|
"loss": 1.443, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.4910759816420194, |
|
"grad_norm": 0.3394755721092224, |
|
"learning_rate": 3.079347503220351e-05, |
|
"loss": 1.6525, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.4931157572667006, |
|
"grad_norm": 0.33389967679977417, |
|
"learning_rate": 3.056005373591637e-05, |
|
"loss": 1.5118, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.495155532891382, |
|
"grad_norm": 0.3200013339519501, |
|
"learning_rate": 3.032736083180716e-05, |
|
"loss": 1.5438, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.4971953085160632, |
|
"grad_norm": 0.34980496764183044, |
|
"learning_rate": 3.0095398760714267e-05, |
|
"loss": 1.5078, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.4992350841407445, |
|
"grad_norm": 0.35308340191841125, |
|
"learning_rate": 2.9864169955810084e-05, |
|
"loss": 1.6637, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.4992350841407445, |
|
"eval_loss": 1.695379614830017, |
|
"eval_runtime": 48.7639, |
|
"eval_samples_per_second": 16.939, |
|
"eval_steps_per_second": 2.133, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.501274859765426, |
|
"grad_norm": 0.36427217721939087, |
|
"learning_rate": 2.9633676842575387e-05, |
|
"loss": 1.5404, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.5033146353901072, |
|
"grad_norm": 0.3411431908607483, |
|
"learning_rate": 2.940392183877382e-05, |
|
"loss": 1.6793, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.5053544110147885, |
|
"grad_norm": 0.3458103537559509, |
|
"learning_rate": 2.9174907354426696e-05, |
|
"loss": 1.4378, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.5073941866394698, |
|
"grad_norm": 0.32269302010536194, |
|
"learning_rate": 2.8946635791787545e-05, |
|
"loss": 1.5621, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.509433962264151, |
|
"grad_norm": 0.3315049111843109, |
|
"learning_rate": 2.8719109545317103e-05, |
|
"loss": 1.6883, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.5114737378888323, |
|
"grad_norm": 0.34527409076690674, |
|
"learning_rate": 2.8492331001657945e-05, |
|
"loss": 1.6777, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.5135135135135136, |
|
"grad_norm": 0.31591078639030457, |
|
"learning_rate": 2.8266302539609745e-05, |
|
"loss": 1.5866, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.5155532891381949, |
|
"grad_norm": 0.351625919342041, |
|
"learning_rate": 2.804102653010414e-05, |
|
"loss": 1.6674, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.5175930647628761, |
|
"grad_norm": 0.32702285051345825, |
|
"learning_rate": 2.7816505336179798e-05, |
|
"loss": 1.6834, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.5196328403875574, |
|
"grad_norm": 0.315550297498703, |
|
"learning_rate": 2.759274131295787e-05, |
|
"loss": 1.6263, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.5216726160122387, |
|
"grad_norm": 0.39837682247161865, |
|
"learning_rate": 2.736973680761702e-05, |
|
"loss": 1.5969, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.52371239163692, |
|
"grad_norm": 0.3614759147167206, |
|
"learning_rate": 2.7147494159369036e-05, |
|
"loss": 1.5608, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.5257521672616012, |
|
"grad_norm": 0.3324558138847351, |
|
"learning_rate": 2.6926015699434072e-05, |
|
"loss": 1.456, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.5277919428862825, |
|
"grad_norm": 0.3555881083011627, |
|
"learning_rate": 2.6705303751016408e-05, |
|
"loss": 1.62, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.5298317185109638, |
|
"grad_norm": 0.3169229030609131, |
|
"learning_rate": 2.6485360629279987e-05, |
|
"loss": 1.6364, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.531871494135645, |
|
"grad_norm": 0.3641349971294403, |
|
"learning_rate": 2.6266188641323996e-05, |
|
"loss": 1.7575, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.5339112697603263, |
|
"grad_norm": 0.3597756624221802, |
|
"learning_rate": 2.6047790086158952e-05, |
|
"loss": 1.7316, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.5359510453850076, |
|
"grad_norm": 0.30949273705482483, |
|
"learning_rate": 2.5830167254682257e-05, |
|
"loss": 1.6266, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.5379908210096889, |
|
"grad_norm": 0.3243308365345001, |
|
"learning_rate": 2.5613322429654574e-05, |
|
"loss": 1.6186, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.5400305966343701, |
|
"grad_norm": 0.34625059366226196, |
|
"learning_rate": 2.5397257885675397e-05, |
|
"loss": 1.4952, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.5420703722590514, |
|
"grad_norm": 0.34348833560943604, |
|
"learning_rate": 2.5181975889159615e-05, |
|
"loss": 1.5961, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.5441101478837327, |
|
"grad_norm": 0.3473558723926544, |
|
"learning_rate": 2.496747869831345e-05, |
|
"loss": 1.7014, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.546149923508414, |
|
"grad_norm": 0.3339255750179291, |
|
"learning_rate": 2.475376856311097e-05, |
|
"loss": 1.5541, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.5481896991330952, |
|
"grad_norm": 0.36445924639701843, |
|
"learning_rate": 2.4540847725270378e-05, |
|
"loss": 1.5242, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.5502294747577765, |
|
"grad_norm": 0.340023398399353, |
|
"learning_rate": 2.432871841823047e-05, |
|
"loss": 1.5245, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.5522692503824578, |
|
"grad_norm": 0.3025660514831543, |
|
"learning_rate": 2.411738286712735e-05, |
|
"loss": 1.4454, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.554309026007139, |
|
"grad_norm": 0.34481656551361084, |
|
"learning_rate": 2.3906843288770886e-05, |
|
"loss": 1.681, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.5563488016318205, |
|
"grad_norm": 0.326921284198761, |
|
"learning_rate": 2.3697101891621697e-05, |
|
"loss": 1.5377, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.5583885772565018, |
|
"grad_norm": 0.34051257371902466, |
|
"learning_rate": 2.3488160875767717e-05, |
|
"loss": 1.5051, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.560428352881183, |
|
"grad_norm": 0.32640084624290466, |
|
"learning_rate": 2.3280022432901383e-05, |
|
"loss": 1.6906, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.5624681285058644, |
|
"grad_norm": 0.36335480213165283, |
|
"learning_rate": 2.307268874629649e-05, |
|
"loss": 1.6184, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.5645079041305456, |
|
"grad_norm": 0.3171195387840271, |
|
"learning_rate": 2.2866161990785228e-05, |
|
"loss": 1.6957, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.566547679755227, |
|
"grad_norm": 0.32760629057884216, |
|
"learning_rate": 2.266044433273562e-05, |
|
"loss": 1.6545, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.5685874553799082, |
|
"grad_norm": 0.3048475980758667, |
|
"learning_rate": 2.245553793002849e-05, |
|
"loss": 1.5742, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.5706272310045895, |
|
"grad_norm": 0.33032047748565674, |
|
"learning_rate": 2.2251444932035094e-05, |
|
"loss": 1.5138, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.5726670066292707, |
|
"grad_norm": 0.3462781310081482, |
|
"learning_rate": 2.204816747959434e-05, |
|
"loss": 1.6444, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.5747067822539522, |
|
"grad_norm": 0.3318116068840027, |
|
"learning_rate": 2.184570770499056e-05, |
|
"loss": 1.7107, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.5767465578786335, |
|
"grad_norm": 0.3060275614261627, |
|
"learning_rate": 2.1644067731931007e-05, |
|
"loss": 1.5604, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.5787863335033148, |
|
"grad_norm": 0.37004798650741577, |
|
"learning_rate": 2.1443249675523536e-05, |
|
"loss": 1.527, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.580826109127996, |
|
"grad_norm": 0.3220769464969635, |
|
"learning_rate": 2.1243255642254578e-05, |
|
"loss": 1.591, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.5828658847526773, |
|
"grad_norm": 0.3345790505409241, |
|
"learning_rate": 2.1044087729966856e-05, |
|
"loss": 1.6216, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.5849056603773586, |
|
"grad_norm": 0.36904582381248474, |
|
"learning_rate": 2.0845748027837586e-05, |
|
"loss": 1.4141, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.5869454360020399, |
|
"grad_norm": 0.34023916721343994, |
|
"learning_rate": 2.0648238616356332e-05, |
|
"loss": 1.6076, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.5889852116267211, |
|
"grad_norm": 0.33998748660087585, |
|
"learning_rate": 2.045156156730338e-05, |
|
"loss": 1.472, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.5910249872514024, |
|
"grad_norm": 0.4052400290966034, |
|
"learning_rate": 2.025571894372794e-05, |
|
"loss": 1.6638, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.5930647628760837, |
|
"grad_norm": 0.3488968014717102, |
|
"learning_rate": 2.0060712799926408e-05, |
|
"loss": 1.765, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.595104538500765, |
|
"grad_norm": 0.3272939622402191, |
|
"learning_rate": 1.9866545181421013e-05, |
|
"loss": 1.5006, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.5971443141254462, |
|
"grad_norm": 0.31578630208969116, |
|
"learning_rate": 1.967321812493813e-05, |
|
"loss": 1.5492, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.5991840897501275, |
|
"grad_norm": 0.3504883944988251, |
|
"learning_rate": 1.9480733658387175e-05, |
|
"loss": 1.5501, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.6012238653748088, |
|
"grad_norm": 0.3361338675022125, |
|
"learning_rate": 1.9289093800839066e-05, |
|
"loss": 1.5733, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.60326364099949, |
|
"grad_norm": 0.37593501806259155, |
|
"learning_rate": 1.9098300562505266e-05, |
|
"loss": 1.5149, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.6053034166241713, |
|
"grad_norm": 0.3598549962043762, |
|
"learning_rate": 1.8908355944716517e-05, |
|
"loss": 1.5617, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.6073431922488526, |
|
"grad_norm": 0.33485177159309387, |
|
"learning_rate": 1.871926193990202e-05, |
|
"loss": 1.51, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.6093829678735339, |
|
"grad_norm": 0.32729336619377136, |
|
"learning_rate": 1.8531020531568378e-05, |
|
"loss": 1.7241, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.6114227434982151, |
|
"grad_norm": 0.32850730419158936, |
|
"learning_rate": 1.8343633694278895e-05, |
|
"loss": 1.5208, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.6134625191228964, |
|
"grad_norm": 0.31730756163597107, |
|
"learning_rate": 1.8157103393632868e-05, |
|
"loss": 1.8503, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.6155022947475777, |
|
"grad_norm": 0.3501776456832886, |
|
"learning_rate": 1.7971431586244815e-05, |
|
"loss": 1.5293, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.617542070372259, |
|
"grad_norm": 0.36974573135375977, |
|
"learning_rate": 1.7786620219724204e-05, |
|
"loss": 1.6683, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.6195818459969402, |
|
"grad_norm": 0.3209790885448456, |
|
"learning_rate": 1.7602671232654754e-05, |
|
"loss": 1.5002, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.6216216216216215, |
|
"grad_norm": 0.3283923864364624, |
|
"learning_rate": 1.741958655457436e-05, |
|
"loss": 1.5206, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.6236613972463028, |
|
"grad_norm": 0.37643903493881226, |
|
"learning_rate": 1.723736810595461e-05, |
|
"loss": 1.5189, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.625701172870984, |
|
"grad_norm": 0.3263504207134247, |
|
"learning_rate": 1.7056017798180824e-05, |
|
"loss": 1.5491, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.6277409484956655, |
|
"grad_norm": 0.3444812297821045, |
|
"learning_rate": 1.6875537533531948e-05, |
|
"loss": 1.5142, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.6297807241203468, |
|
"grad_norm": 0.3257887363433838, |
|
"learning_rate": 1.6695929205160487e-05, |
|
"loss": 1.6884, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.631820499745028, |
|
"grad_norm": 0.36854130029678345, |
|
"learning_rate": 1.65171946970729e-05, |
|
"loss": 1.574, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.6338602753697093, |
|
"grad_norm": 0.34607166051864624, |
|
"learning_rate": 1.6339335884109518e-05, |
|
"loss": 1.5739, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.6359000509943906, |
|
"grad_norm": 0.3600076138973236, |
|
"learning_rate": 1.6162354631925204e-05, |
|
"loss": 1.6571, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.637939826619072, |
|
"grad_norm": 0.3381345868110657, |
|
"learning_rate": 1.598625279696948e-05, |
|
"loss": 1.6067, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.6399796022437532, |
|
"grad_norm": 0.33851006627082825, |
|
"learning_rate": 1.5811032226467305e-05, |
|
"loss": 1.5163, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.6420193778684344, |
|
"grad_norm": 0.3231305778026581, |
|
"learning_rate": 1.563669475839956e-05, |
|
"loss": 1.614, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.6440591534931157, |
|
"grad_norm": 0.3223206400871277, |
|
"learning_rate": 1.5463242221483743e-05, |
|
"loss": 1.4504, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.6460989291177972, |
|
"grad_norm": 0.34715861082077026, |
|
"learning_rate": 1.529067643515495e-05, |
|
"loss": 1.7934, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.6481387047424785, |
|
"grad_norm": 0.31931138038635254, |
|
"learning_rate": 1.5118999209546559e-05, |
|
"loss": 1.5801, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.6501784803671598, |
|
"grad_norm": 0.31838297843933105, |
|
"learning_rate": 1.4948212345471491e-05, |
|
"loss": 1.7495, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.652218255991841, |
|
"grad_norm": 0.3544289767742157, |
|
"learning_rate": 1.4778317634403083e-05, |
|
"loss": 1.6048, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.6542580316165223, |
|
"grad_norm": 0.3429974317550659, |
|
"learning_rate": 1.460931685845649e-05, |
|
"loss": 1.6734, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.6562978072412036, |
|
"grad_norm": 0.3236132562160492, |
|
"learning_rate": 1.444121179036989e-05, |
|
"loss": 1.5718, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.6583375828658848, |
|
"grad_norm": 0.3145132064819336, |
|
"learning_rate": 1.427400419348588e-05, |
|
"loss": 1.504, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.6603773584905661, |
|
"grad_norm": 0.35008659958839417, |
|
"learning_rate": 1.4107695821733025e-05, |
|
"loss": 1.513, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.6624171341152474, |
|
"grad_norm": 0.36491960287094116, |
|
"learning_rate": 1.3942288419607475e-05, |
|
"loss": 1.5571, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.6644569097399287, |
|
"grad_norm": 0.34494370222091675, |
|
"learning_rate": 1.3777783722154603e-05, |
|
"loss": 1.598, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.66649668536461, |
|
"grad_norm": 0.3233909010887146, |
|
"learning_rate": 1.3614183454950824e-05, |
|
"loss": 1.7063, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.6685364609892912, |
|
"grad_norm": 0.31574997305870056, |
|
"learning_rate": 1.3451489334085554e-05, |
|
"loss": 1.5871, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.6705762366139725, |
|
"grad_norm": 0.36086753010749817, |
|
"learning_rate": 1.3289703066143111e-05, |
|
"loss": 1.5401, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.6726160122386537, |
|
"grad_norm": 0.35289233922958374, |
|
"learning_rate": 1.3128826348184887e-05, |
|
"loss": 1.5204, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.674655787863335, |
|
"grad_norm": 0.3321409821510315, |
|
"learning_rate": 1.2968860867731569e-05, |
|
"loss": 1.647, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.6766955634880163, |
|
"grad_norm": 0.3479863405227661, |
|
"learning_rate": 1.2809808302745297e-05, |
|
"loss": 1.6429, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.6787353391126976, |
|
"grad_norm": 0.3064015209674835, |
|
"learning_rate": 1.2651670321612263e-05, |
|
"loss": 1.349, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.6807751147373788, |
|
"grad_norm": 0.34040096402168274, |
|
"learning_rate": 1.2494448583125018e-05, |
|
"loss": 1.5489, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.6828148903620601, |
|
"grad_norm": 0.3219228982925415, |
|
"learning_rate": 1.233814473646524e-05, |
|
"loss": 1.5782, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.6848546659867414, |
|
"grad_norm": 0.3284182846546173, |
|
"learning_rate": 1.218276042118629e-05, |
|
"loss": 1.7502, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.6868944416114227, |
|
"grad_norm": 0.3325609862804413, |
|
"learning_rate": 1.202829726719611e-05, |
|
"loss": 1.6647, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.688934217236104, |
|
"grad_norm": 0.3373055160045624, |
|
"learning_rate": 1.1874756894740135e-05, |
|
"loss": 1.6314, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.6909739928607852, |
|
"grad_norm": 0.3512837886810303, |
|
"learning_rate": 1.172214091438416e-05, |
|
"loss": 1.595, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.6930137684854665, |
|
"grad_norm": 0.32167279720306396, |
|
"learning_rate": 1.1570450926997655e-05, |
|
"loss": 1.5507, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.6950535441101477, |
|
"grad_norm": 0.37384939193725586, |
|
"learning_rate": 1.141968852373676e-05, |
|
"loss": 1.7151, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.697093319734829, |
|
"grad_norm": 0.3775462806224823, |
|
"learning_rate": 1.1269855286027797e-05, |
|
"loss": 1.6552, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.6991330953595103, |
|
"grad_norm": 0.3442714512348175, |
|
"learning_rate": 1.1120952785550476e-05, |
|
"loss": 1.416, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.7011728709841918, |
|
"grad_norm": 0.3268585205078125, |
|
"learning_rate": 1.0972982584221592e-05, |
|
"loss": 1.5926, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.703212646608873, |
|
"grad_norm": 0.33221572637557983, |
|
"learning_rate": 1.0825946234178574e-05, |
|
"loss": 1.5878, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.7052524222335543, |
|
"grad_norm": 0.32683464884757996, |
|
"learning_rate": 1.067984527776309e-05, |
|
"loss": 1.5254, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.7072921978582356, |
|
"grad_norm": 0.34125399589538574, |
|
"learning_rate": 1.0534681247505106e-05, |
|
"loss": 1.701, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.7093319734829169, |
|
"grad_norm": 0.33410507440567017, |
|
"learning_rate": 1.0390455666106547e-05, |
|
"loss": 1.5889, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.7113717491075982, |
|
"grad_norm": 0.3767413794994354, |
|
"learning_rate": 1.024717004642557e-05, |
|
"loss": 1.7098, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.7134115247322794, |
|
"grad_norm": 0.32880616188049316, |
|
"learning_rate": 1.010482589146048e-05, |
|
"loss": 1.6303, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.7154513003569607, |
|
"grad_norm": 0.3286539316177368, |
|
"learning_rate": 9.963424694334122e-06, |
|
"loss": 1.7356, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.717491075981642, |
|
"grad_norm": 0.3123774528503418, |
|
"learning_rate": 9.822967938278171e-06, |
|
"loss": 1.6089, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.7195308516063235, |
|
"grad_norm": 0.32634440064430237, |
|
"learning_rate": 9.683457096617488e-06, |
|
"loss": 1.5619, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.7215706272310047, |
|
"grad_norm": 0.33105430006980896, |
|
"learning_rate": 9.544893632754814e-06, |
|
"loss": 1.5669, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.723610402855686, |
|
"grad_norm": 0.3234579563140869, |
|
"learning_rate": 9.407279000155312e-06, |
|
"loss": 1.4746, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.7256501784803673, |
|
"grad_norm": 0.3418089747428894, |
|
"learning_rate": 9.270614642331376e-06, |
|
"loss": 1.4701, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.7276899541050486, |
|
"grad_norm": 0.2889448404312134, |
|
"learning_rate": 9.134901992827427e-06, |
|
"loss": 1.6088, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.7297297297297298, |
|
"grad_norm": 0.33682364225387573, |
|
"learning_rate": 9.000142475204964e-06, |
|
"loss": 1.7242, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.731769505354411, |
|
"grad_norm": 0.3277648389339447, |
|
"learning_rate": 8.866337503027522e-06, |
|
"loss": 1.4125, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.7338092809790924, |
|
"grad_norm": 0.3308608829975128, |
|
"learning_rate": 8.733488479845997e-06, |
|
"loss": 1.5832, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.7358490566037736, |
|
"grad_norm": 0.3282967209815979, |
|
"learning_rate": 8.60159679918372e-06, |
|
"loss": 1.5004, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.737888832228455, |
|
"grad_norm": 0.31315362453460693, |
|
"learning_rate": 8.470663844522052e-06, |
|
"loss": 1.6842, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.7399286078531362, |
|
"grad_norm": 0.3475480377674103, |
|
"learning_rate": 8.340690989285726e-06, |
|
"loss": 1.8289, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.7419683834778175, |
|
"grad_norm": 0.31732621788978577, |
|
"learning_rate": 8.21167959682848e-06, |
|
"loss": 1.531, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.7440081591024987, |
|
"grad_norm": 0.3633415102958679, |
|
"learning_rate": 8.083631020418791e-06, |
|
"loss": 1.4785, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.74604793472718, |
|
"grad_norm": 0.3418223559856415, |
|
"learning_rate": 7.956546603225601e-06, |
|
"loss": 1.6414, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.7480877103518613, |
|
"grad_norm": 0.3294214904308319, |
|
"learning_rate": 7.830427678304353e-06, |
|
"loss": 1.5571, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.7501274859765426, |
|
"grad_norm": 0.3359309732913971, |
|
"learning_rate": 7.705275568582848e-06, |
|
"loss": 1.5797, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.7521672616012238, |
|
"grad_norm": 0.3166341781616211, |
|
"learning_rate": 7.581091586847522e-06, |
|
"loss": 1.5041, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.754207037225905, |
|
"grad_norm": 0.34301674365997314, |
|
"learning_rate": 7.457877035729588e-06, |
|
"loss": 1.6699, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.7562468128505864, |
|
"grad_norm": 0.3607090413570404, |
|
"learning_rate": 7.335633207691361e-06, |
|
"loss": 1.6225, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.7582865884752676, |
|
"grad_norm": 0.3538651764392853, |
|
"learning_rate": 7.21436138501278e-06, |
|
"loss": 1.7165, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.760326364099949, |
|
"grad_norm": 0.335679292678833, |
|
"learning_rate": 7.094062839777837e-06, |
|
"loss": 1.4925, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.7623661397246302, |
|
"grad_norm": 0.36252662539482117, |
|
"learning_rate": 6.974738833861383e-06, |
|
"loss": 1.4831, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.7644059153493115, |
|
"grad_norm": 0.34220951795578003, |
|
"learning_rate": 6.856390618915775e-06, |
|
"loss": 1.6031, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.7664456909739927, |
|
"grad_norm": 0.32884883880615234, |
|
"learning_rate": 6.739019436357774e-06, |
|
"loss": 1.6789, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.768485466598674, |
|
"grad_norm": 0.32627391815185547, |
|
"learning_rate": 6.622626517355557e-06, |
|
"loss": 1.517, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.7705252422233553, |
|
"grad_norm": 0.3443872928619385, |
|
"learning_rate": 6.507213082815744e-06, |
|
"loss": 1.7773, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.7725650178480366, |
|
"grad_norm": 0.3367038369178772, |
|
"learning_rate": 6.392780343370686e-06, |
|
"loss": 1.5157, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.774604793472718, |
|
"grad_norm": 0.34665989875793457, |
|
"learning_rate": 6.2793294993656494e-06, |
|
"loss": 1.5797, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.7766445690973993, |
|
"grad_norm": 0.37426507472991943, |
|
"learning_rate": 6.166861740846297e-06, |
|
"loss": 1.6184, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.7786843447220806, |
|
"grad_norm": 0.3536684811115265, |
|
"learning_rate": 6.055378247546218e-06, |
|
"loss": 1.5539, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.7807241203467619, |
|
"grad_norm": 0.3466222584247589, |
|
"learning_rate": 5.9448801888744795e-06, |
|
"loss": 1.6129, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.7827638959714431, |
|
"grad_norm": 0.33310428261756897, |
|
"learning_rate": 5.835368723903456e-06, |
|
"loss": 1.591, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.7848036715961244, |
|
"grad_norm": 0.37535133957862854, |
|
"learning_rate": 5.726845001356573e-06, |
|
"loss": 1.6924, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.7868434472208057, |
|
"grad_norm": 0.3487565517425537, |
|
"learning_rate": 5.6193101595963585e-06, |
|
"loss": 1.6721, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.788883222845487, |
|
"grad_norm": 0.3542119562625885, |
|
"learning_rate": 5.512765326612379e-06, |
|
"loss": 1.5829, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.7909229984701684, |
|
"grad_norm": 0.37221917510032654, |
|
"learning_rate": 5.407211620009544e-06, |
|
"loss": 1.5421, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.7929627740948497, |
|
"grad_norm": 0.31831902265548706, |
|
"learning_rate": 5.30265014699628e-06, |
|
"loss": 1.6259, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.795002549719531, |
|
"grad_norm": 0.33141520619392395, |
|
"learning_rate": 5.199082004372957e-06, |
|
"loss": 1.4876, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.7970423253442123, |
|
"grad_norm": 0.37575435638427734, |
|
"learning_rate": 5.096508278520384e-06, |
|
"loss": 1.8337, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.7990821009688935, |
|
"grad_norm": 0.3518213927745819, |
|
"learning_rate": 4.994930045388413e-06, |
|
"loss": 1.3687, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.8011218765935748, |
|
"grad_norm": 0.3328496217727661, |
|
"learning_rate": 4.8943483704846475e-06, |
|
"loss": 1.6246, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.803161652218256, |
|
"grad_norm": 0.3764415383338928, |
|
"learning_rate": 4.794764308863242e-06, |
|
"loss": 1.901, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.8052014278429374, |
|
"grad_norm": 0.352507621049881, |
|
"learning_rate": 4.6961789051139124e-06, |
|
"loss": 1.5077, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.8072412034676186, |
|
"grad_norm": 0.3292696475982666, |
|
"learning_rate": 4.5985931933508754e-06, |
|
"loss": 1.4446, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.8092809790923, |
|
"grad_norm": 0.37563368678092957, |
|
"learning_rate": 4.502008197202068e-06, |
|
"loss": 1.6857, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.8113207547169812, |
|
"grad_norm": 0.3451184928417206, |
|
"learning_rate": 4.406424929798403e-06, |
|
"loss": 1.4763, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.8133605303416624, |
|
"grad_norm": 0.33383306860923767, |
|
"learning_rate": 4.311844393763109e-06, |
|
"loss": 1.4001, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.8154003059663437, |
|
"grad_norm": 0.32124102115631104, |
|
"learning_rate": 4.2182675812012965e-06, |
|
"loss": 1.5507, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.817440081591025, |
|
"grad_norm": 0.3836451768875122, |
|
"learning_rate": 4.125695473689406e-06, |
|
"loss": 1.571, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.8194798572157063, |
|
"grad_norm": 0.33983170986175537, |
|
"learning_rate": 4.034129042265066e-06, |
|
"loss": 1.6818, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.8215196328403875, |
|
"grad_norm": 0.3245023488998413, |
|
"learning_rate": 3.943569247416801e-06, |
|
"loss": 1.3421, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.8235594084650688, |
|
"grad_norm": 0.3230661451816559, |
|
"learning_rate": 3.854017039074009e-06, |
|
"loss": 1.4876, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.82559918408975, |
|
"grad_norm": 0.3374238610267639, |
|
"learning_rate": 3.7654733565969826e-06, |
|
"loss": 1.7574, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.8276389597144314, |
|
"grad_norm": 0.3538075089454651, |
|
"learning_rate": 3.6779391287670494e-06, |
|
"loss": 1.5753, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.8296787353391126, |
|
"grad_norm": 0.3529132008552551, |
|
"learning_rate": 3.591415273776855e-06, |
|
"loss": 1.5352, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.831718510963794, |
|
"grad_norm": 0.33535516262054443, |
|
"learning_rate": 3.5059026992206647e-06, |
|
"loss": 1.4236, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.8337582865884752, |
|
"grad_norm": 0.32166826725006104, |
|
"learning_rate": 3.421402302084953e-06, |
|
"loss": 1.5437, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.8357980622131564, |
|
"grad_norm": 0.35232868790626526, |
|
"learning_rate": 3.3379149687388867e-06, |
|
"loss": 1.5435, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.8378378378378377, |
|
"grad_norm": 0.3278467059135437, |
|
"learning_rate": 3.2554415749250888e-06, |
|
"loss": 1.5345, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.839877613462519, |
|
"grad_norm": 0.33369168639183044, |
|
"learning_rate": 3.1739829857504234e-06, |
|
"loss": 1.5248, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.8419173890872003, |
|
"grad_norm": 0.35533764958381653, |
|
"learning_rate": 3.093540055676958e-06, |
|
"loss": 1.7297, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.8439571647118815, |
|
"grad_norm": 0.3504745364189148, |
|
"learning_rate": 3.014113628512982e-06, |
|
"loss": 1.6832, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.845996940336563, |
|
"grad_norm": 0.33277443051338196, |
|
"learning_rate": 2.9357045374040825e-06, |
|
"loss": 1.514, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.8480367159612443, |
|
"grad_norm": 0.3157265782356262, |
|
"learning_rate": 2.8583136048245697e-06, |
|
"loss": 1.5827, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.8500764915859256, |
|
"grad_norm": 0.3428395986557007, |
|
"learning_rate": 2.781941642568686e-06, |
|
"loss": 1.5938, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.8521162672106068, |
|
"grad_norm": 0.3374231159687042, |
|
"learning_rate": 2.706589451742181e-06, |
|
"loss": 1.4763, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.8541560428352881, |
|
"grad_norm": 0.3595046401023865, |
|
"learning_rate": 2.632257822753881e-06, |
|
"loss": 1.4746, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.8561958184599694, |
|
"grad_norm": 0.3170601725578308, |
|
"learning_rate": 2.5589475353073988e-06, |
|
"loss": 1.4764, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.8582355940846507, |
|
"grad_norm": 0.34321513772010803, |
|
"learning_rate": 2.486659358392951e-06, |
|
"loss": 1.6465, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.860275369709332, |
|
"grad_norm": 0.3204958736896515, |
|
"learning_rate": 2.415394050279318e-06, |
|
"loss": 1.7065, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.8623151453340132, |
|
"grad_norm": 0.3391380310058594, |
|
"learning_rate": 2.3451523585058754e-06, |
|
"loss": 1.5181, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.8643549209586947, |
|
"grad_norm": 0.3181851804256439, |
|
"learning_rate": 2.2759350198746976e-06, |
|
"loss": 1.5589, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.866394696583376, |
|
"grad_norm": 0.3439972996711731, |
|
"learning_rate": 2.2077427604429433e-06, |
|
"loss": 1.4837, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.8684344722080573, |
|
"grad_norm": 0.33056753873825073, |
|
"learning_rate": 2.1405762955151176e-06, |
|
"loss": 1.4456, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.8704742478327385, |
|
"grad_norm": 0.3104906976222992, |
|
"learning_rate": 2.074436329635687e-06, |
|
"loss": 1.6013, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.8725140234574198, |
|
"grad_norm": 0.35013991594314575, |
|
"learning_rate": 2.009323556581566e-06, |
|
"loss": 1.6274, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.874553799082101, |
|
"grad_norm": 0.33168038725852966, |
|
"learning_rate": 1.945238659354953e-06, |
|
"loss": 1.5064, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.8765935747067823, |
|
"grad_norm": 0.3438383936882019, |
|
"learning_rate": 1.882182310176095e-06, |
|
"loss": 1.5561, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.8786333503314636, |
|
"grad_norm": 0.33750832080841064, |
|
"learning_rate": 1.8201551704762453e-06, |
|
"loss": 1.5402, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.8806731259561449, |
|
"grad_norm": 0.3846586346626282, |
|
"learning_rate": 1.7591578908907724e-06, |
|
"loss": 1.7503, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.8827129015808262, |
|
"grad_norm": 0.36325526237487793, |
|
"learning_rate": 1.6991911112522407e-06, |
|
"loss": 1.5359, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.8847526772055074, |
|
"grad_norm": 0.3386278748512268, |
|
"learning_rate": 1.6402554605838172e-06, |
|
"loss": 1.4694, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.8867924528301887, |
|
"grad_norm": 0.3506017327308655, |
|
"learning_rate": 1.5823515570925763e-06, |
|
"loss": 1.7565, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.88883222845487, |
|
"grad_norm": 0.30699026584625244, |
|
"learning_rate": 1.5254800081630826e-06, |
|
"loss": 1.4995, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.8908720040795512, |
|
"grad_norm": 0.32542306184768677, |
|
"learning_rate": 1.4696414103509636e-06, |
|
"loss": 1.6048, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.8929117797042325, |
|
"grad_norm": 0.3201080560684204, |
|
"learning_rate": 1.4148363493766802e-06, |
|
"loss": 1.6548, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.8949515553289138, |
|
"grad_norm": 0.35210901498794556, |
|
"learning_rate": 1.361065400119399e-06, |
|
"loss": 1.5078, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.896991330953595, |
|
"grad_norm": 0.3742671012878418, |
|
"learning_rate": 1.30832912661093e-06, |
|
"loss": 1.411, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.8990311065782763, |
|
"grad_norm": 0.31074491143226624, |
|
"learning_rate": 1.2566280820298426e-06, |
|
"loss": 1.3539, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.9010708822029576, |
|
"grad_norm": 0.33461233973503113, |
|
"learning_rate": 1.2059628086956044e-06, |
|
"loss": 1.6129, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.9031106578276389, |
|
"grad_norm": 0.3572944700717926, |
|
"learning_rate": 1.1563338380629618e-06, |
|
"loss": 1.4976, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.9051504334523202, |
|
"grad_norm": 0.34117141366004944, |
|
"learning_rate": 1.1077416907163574e-06, |
|
"loss": 1.4853, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.9071902090770014, |
|
"grad_norm": 0.34180155396461487, |
|
"learning_rate": 1.0601868763643996e-06, |
|
"loss": 1.6407, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.9092299847016827, |
|
"grad_norm": 0.3573477864265442, |
|
"learning_rate": 1.0136698938346011e-06, |
|
"loss": 1.6081, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.911269760326364, |
|
"grad_norm": 0.35802096128463745, |
|
"learning_rate": 9.68191231068083e-07, |
|
"loss": 1.7181, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.9133095359510452, |
|
"grad_norm": 0.336832731962204, |
|
"learning_rate": 9.237513651145225e-07, |
|
"loss": 1.3533, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.9153493115757265, |
|
"grad_norm": 0.3149198293685913, |
|
"learning_rate": 8.803507621270579e-07, |
|
"loss": 1.352, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.9173890872004078, |
|
"grad_norm": 0.3341595232486725, |
|
"learning_rate": 8.379898773574924e-07, |
|
"loss": 1.5971, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.9194288628250893, |
|
"grad_norm": 0.34054580330848694, |
|
"learning_rate": 7.966691551514527e-07, |
|
"loss": 1.6251, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.9214686384497706, |
|
"grad_norm": 0.3276701271533966, |
|
"learning_rate": 7.563890289437825e-07, |
|
"loss": 1.598, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.9235084140744518, |
|
"grad_norm": 0.3295578062534332, |
|
"learning_rate": 7.171499212539123e-07, |
|
"loss": 1.6034, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.925548189699133, |
|
"grad_norm": 0.3124001920223236, |
|
"learning_rate": 6.78952243681541e-07, |
|
"loss": 1.4692, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.9275879653238144, |
|
"grad_norm": 0.35024166107177734, |
|
"learning_rate": 6.41796396902239e-07, |
|
"loss": 1.532, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.9296277409484957, |
|
"grad_norm": 0.34173583984375, |
|
"learning_rate": 6.056827706632185e-07, |
|
"loss": 1.6586, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.931667516573177, |
|
"grad_norm": 0.3405357003211975, |
|
"learning_rate": 5.706117437793701e-07, |
|
"loss": 1.6072, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.9337072921978582, |
|
"grad_norm": 0.31896910071372986, |
|
"learning_rate": 5.365836841291438e-07, |
|
"loss": 1.5897, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.9357470678225395, |
|
"grad_norm": 0.3417130410671234, |
|
"learning_rate": 5.035989486508075e-07, |
|
"loss": 1.6046, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.937786843447221, |
|
"grad_norm": 0.3479056656360626, |
|
"learning_rate": 4.7165788333860536e-07, |
|
"loss": 1.6084, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.9398266190719022, |
|
"grad_norm": 0.34046056866645813, |
|
"learning_rate": 4.4076082323920576e-07, |
|
"loss": 1.4111, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.9418663946965835, |
|
"grad_norm": 0.3575671315193176, |
|
"learning_rate": 4.1090809244814785e-07, |
|
"loss": 1.4932, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.9439061703212648, |
|
"grad_norm": 0.35397884249687195, |
|
"learning_rate": 3.82100004106456e-07, |
|
"loss": 1.7126, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.945945945945946, |
|
"grad_norm": 0.33311253786087036, |
|
"learning_rate": 3.543368603973529e-07, |
|
"loss": 1.6558, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.9479857215706273, |
|
"grad_norm": 0.35361233353614807, |
|
"learning_rate": 3.2761895254306287e-07, |
|
"loss": 1.7049, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.9500254971953086, |
|
"grad_norm": 0.35136890411376953, |
|
"learning_rate": 3.019465608018024e-07, |
|
"loss": 1.4853, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.9520652728199899, |
|
"grad_norm": 0.32909440994262695, |
|
"learning_rate": 2.773199544648164e-07, |
|
"loss": 1.534, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.9541050484446711, |
|
"grad_norm": 0.3327445387840271, |
|
"learning_rate": 2.537393918535358e-07, |
|
"loss": 1.4084, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.9561448240693524, |
|
"grad_norm": 0.3465093672275543, |
|
"learning_rate": 2.312051203169352e-07, |
|
"loss": 1.5575, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.9581845996940337, |
|
"grad_norm": 0.31249111890792847, |
|
"learning_rate": 2.0971737622883515e-07, |
|
"loss": 1.6387, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.960224375318715, |
|
"grad_norm": 0.32050248980522156, |
|
"learning_rate": 1.8927638498551502e-07, |
|
"loss": 1.6431, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.9622641509433962, |
|
"grad_norm": 0.31345027685165405, |
|
"learning_rate": 1.6988236100329292e-07, |
|
"loss": 1.5125, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.9643039265680775, |
|
"grad_norm": 0.32769250869750977, |
|
"learning_rate": 1.5153550771630498e-07, |
|
"loss": 1.5726, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.9663437021927588, |
|
"grad_norm": 0.32842305302619934, |
|
"learning_rate": 1.3423601757436287e-07, |
|
"loss": 1.5826, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.96838347781744, |
|
"grad_norm": 0.34551113843917847, |
|
"learning_rate": 1.179840720409331e-07, |
|
"loss": 1.6897, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.9704232534421213, |
|
"grad_norm": 0.35393452644348145, |
|
"learning_rate": 1.0277984159122733e-07, |
|
"loss": 1.6387, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.9724630290668026, |
|
"grad_norm": 0.3255729377269745, |
|
"learning_rate": 8.862348571043733e-08, |
|
"loss": 1.5134, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.9745028046914839, |
|
"grad_norm": 0.334995836019516, |
|
"learning_rate": 7.551515289203615e-08, |
|
"loss": 1.4714, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.9765425803161651, |
|
"grad_norm": 0.3348861634731293, |
|
"learning_rate": 6.34549806362239e-08, |
|
"loss": 1.4172, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.9785823559408464, |
|
"grad_norm": 0.34796562790870667, |
|
"learning_rate": 5.2443095448506674e-08, |
|
"loss": 1.3779, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.9806221315655277, |
|
"grad_norm": 0.36725080013275146, |
|
"learning_rate": 4.247961283835311e-08, |
|
"loss": 1.4744, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.982661907190209, |
|
"grad_norm": 0.349135160446167, |
|
"learning_rate": 3.356463731798432e-08, |
|
"loss": 1.6751, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.9847016828148902, |
|
"grad_norm": 0.34291085600852966, |
|
"learning_rate": 2.5698262401263605e-08, |
|
"loss": 1.5718, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.9867414584395715, |
|
"grad_norm": 0.3310767710208893, |
|
"learning_rate": 1.888057060274173e-08, |
|
"loss": 1.6042, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.9887812340642528, |
|
"grad_norm": 0.3502123951911926, |
|
"learning_rate": 1.3111633436779791e-08, |
|
"loss": 1.6323, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.990821009688934, |
|
"grad_norm": 0.34913215041160583, |
|
"learning_rate": 8.391511416816489e-09, |
|
"loss": 1.5433, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.9928607853136155, |
|
"grad_norm": 0.3260555863380432, |
|
"learning_rate": 4.720254054679796e-09, |
|
"loss": 1.5161, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.9949005609382968, |
|
"grad_norm": 0.3483281433582306, |
|
"learning_rate": 2.0978998601206556e-09, |
|
"loss": 1.5955, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.996940336562978, |
|
"grad_norm": 0.33206477761268616, |
|
"learning_rate": 5.244763404133046e-10, |
|
"loss": 1.6749, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.9989801121876594, |
|
"grad_norm": 0.34603679180145264, |
|
"learning_rate": 0.0, |
|
"loss": 1.6485, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.9989801121876594, |
|
"eval_loss": 1.6838241815567017, |
|
"eval_runtime": 49.2126, |
|
"eval_samples_per_second": 16.784, |
|
"eval_steps_per_second": 2.113, |
|
"step": 980 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 980, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 490, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.465388413369713e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|