|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9994900560938297, |
|
"eval_steps": 245, |
|
"global_step": 490, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.002039775624681285, |
|
"grad_norm": 0.08994754403829575, |
|
"learning_rate": 2e-05, |
|
"loss": 2.6733, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002039775624681285, |
|
"eval_loss": 2.6805498600006104, |
|
"eval_runtime": 48.2372, |
|
"eval_samples_per_second": 17.124, |
|
"eval_steps_per_second": 2.156, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00407955124936257, |
|
"grad_norm": 0.09641231596469879, |
|
"learning_rate": 4e-05, |
|
"loss": 2.7875, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.006119326874043855, |
|
"grad_norm": 0.10044872015714645, |
|
"learning_rate": 6e-05, |
|
"loss": 2.6516, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.00815910249872514, |
|
"grad_norm": 0.08677750825881958, |
|
"learning_rate": 8e-05, |
|
"loss": 2.4109, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.010198878123406425, |
|
"grad_norm": 0.10869669914245605, |
|
"learning_rate": 0.0001, |
|
"loss": 2.6336, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01223865374808771, |
|
"grad_norm": 0.1180991679430008, |
|
"learning_rate": 0.00012, |
|
"loss": 2.6001, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.014278429372768995, |
|
"grad_norm": 0.1641693264245987, |
|
"learning_rate": 0.00014, |
|
"loss": 2.9137, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01631820499745028, |
|
"grad_norm": 0.17794281244277954, |
|
"learning_rate": 0.00016, |
|
"loss": 2.835, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.018357980622131564, |
|
"grad_norm": 0.16260501742362976, |
|
"learning_rate": 0.00018, |
|
"loss": 2.6015, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02039775624681285, |
|
"grad_norm": 0.13795699179172516, |
|
"learning_rate": 0.0002, |
|
"loss": 2.2552, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.022437531871494134, |
|
"grad_norm": 0.15532873570919037, |
|
"learning_rate": 0.00019999947552365961, |
|
"loss": 2.4393, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02447730749617542, |
|
"grad_norm": 0.159002423286438, |
|
"learning_rate": 0.00019999790210013988, |
|
"loss": 2.7684, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.026517083120856707, |
|
"grad_norm": 0.17115084826946259, |
|
"learning_rate": 0.0001999952797459453, |
|
"loss": 2.2409, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02855685874553799, |
|
"grad_norm": 0.26108402013778687, |
|
"learning_rate": 0.0001999916084885832, |
|
"loss": 2.6388, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.030596634370219276, |
|
"grad_norm": 0.29758986830711365, |
|
"learning_rate": 0.00019998688836656323, |
|
"loss": 2.4358, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03263640999490056, |
|
"grad_norm": 0.2338196337223053, |
|
"learning_rate": 0.0001999811194293973, |
|
"loss": 2.3898, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.03467618561958185, |
|
"grad_norm": 0.2143183797597885, |
|
"learning_rate": 0.00019997430173759875, |
|
"loss": 2.6874, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.03671596124426313, |
|
"grad_norm": 0.19598309695720673, |
|
"learning_rate": 0.00019996643536268204, |
|
"loss": 2.409, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.038755736868944415, |
|
"grad_norm": 0.16046980023384094, |
|
"learning_rate": 0.00019995752038716168, |
|
"loss": 2.2378, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0407955124936257, |
|
"grad_norm": 0.15274696052074432, |
|
"learning_rate": 0.00019994755690455152, |
|
"loss": 2.3413, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04283528811830699, |
|
"grad_norm": 0.24761317670345306, |
|
"learning_rate": 0.0001999365450193638, |
|
"loss": 2.3382, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.04487506374298827, |
|
"grad_norm": 0.2602517604827881, |
|
"learning_rate": 0.00019992448484710797, |
|
"loss": 2.3355, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.046914839367669554, |
|
"grad_norm": 0.25506100058555603, |
|
"learning_rate": 0.00019991137651428957, |
|
"loss": 2.4828, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.04895461499235084, |
|
"grad_norm": 0.19493895769119263, |
|
"learning_rate": 0.0001998972201584088, |
|
"loss": 2.3154, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.05099439061703213, |
|
"grad_norm": 0.16005843877792358, |
|
"learning_rate": 0.0001998820159279591, |
|
"loss": 2.1472, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.053034166241713414, |
|
"grad_norm": 0.17840257287025452, |
|
"learning_rate": 0.00019986576398242566, |
|
"loss": 2.1624, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.05507394186639469, |
|
"grad_norm": 0.15280942618846893, |
|
"learning_rate": 0.0001998484644922837, |
|
"loss": 2.0526, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.05711371749107598, |
|
"grad_norm": 0.16933050751686096, |
|
"learning_rate": 0.00019983011763899673, |
|
"loss": 2.2176, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.059153493115757266, |
|
"grad_norm": 0.21265609562397003, |
|
"learning_rate": 0.0001998107236150145, |
|
"loss": 2.2559, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.06119326874043855, |
|
"grad_norm": 0.19340573251247406, |
|
"learning_rate": 0.00019979028262377118, |
|
"loss": 2.3202, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06323304436511984, |
|
"grad_norm": 0.16693681478500366, |
|
"learning_rate": 0.0001997687948796831, |
|
"loss": 2.0623, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.06527281998980113, |
|
"grad_norm": 0.18830184638500214, |
|
"learning_rate": 0.00019974626060814647, |
|
"loss": 2.1929, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.06731259561448241, |
|
"grad_norm": 0.16206099092960358, |
|
"learning_rate": 0.0001997226800455352, |
|
"loss": 2.3048, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.0693523712391637, |
|
"grad_norm": 0.21650008857250214, |
|
"learning_rate": 0.00019969805343919821, |
|
"loss": 2.2928, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.07139214686384497, |
|
"grad_norm": 0.19560708105564117, |
|
"learning_rate": 0.00019967238104745696, |
|
"loss": 2.4389, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.07343192248852626, |
|
"grad_norm": 0.17428375780582428, |
|
"learning_rate": 0.00019964566313960264, |
|
"loss": 2.1017, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.07547169811320754, |
|
"grad_norm": 0.14846819639205933, |
|
"learning_rate": 0.00019961789999589356, |
|
"loss": 2.0431, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.07751147373788883, |
|
"grad_norm": 0.14796215295791626, |
|
"learning_rate": 0.00019958909190755187, |
|
"loss": 2.0363, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.07955124936257012, |
|
"grad_norm": 0.17396488785743713, |
|
"learning_rate": 0.0001995592391767608, |
|
"loss": 2.0835, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.0815910249872514, |
|
"grad_norm": 0.1914213001728058, |
|
"learning_rate": 0.0001995283421166614, |
|
"loss": 2.0242, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.08363080061193269, |
|
"grad_norm": 0.16040053963661194, |
|
"learning_rate": 0.00019949640105134918, |
|
"loss": 2.2171, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.08567057623661398, |
|
"grad_norm": 0.16291815042495728, |
|
"learning_rate": 0.00019946341631587087, |
|
"loss": 2.2144, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.08771035186129526, |
|
"grad_norm": 0.16890890896320343, |
|
"learning_rate": 0.00019942938825622065, |
|
"loss": 2.0775, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.08975012748597654, |
|
"grad_norm": 0.18898645043373108, |
|
"learning_rate": 0.0001993943172293368, |
|
"loss": 2.262, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.09178990311065782, |
|
"grad_norm": 0.19105440378189087, |
|
"learning_rate": 0.00019935820360309777, |
|
"loss": 2.0232, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.09382967873533911, |
|
"grad_norm": 0.17944768071174622, |
|
"learning_rate": 0.00019932104775631846, |
|
"loss": 2.2564, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0958694543600204, |
|
"grad_norm": 0.1691497266292572, |
|
"learning_rate": 0.0001992828500787461, |
|
"loss": 2.0249, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.09790922998470168, |
|
"grad_norm": 0.18704521656036377, |
|
"learning_rate": 0.00019924361097105623, |
|
"loss": 2.2058, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.09994900560938297, |
|
"grad_norm": 0.24133948981761932, |
|
"learning_rate": 0.00019920333084484857, |
|
"loss": 2.2835, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.10198878123406425, |
|
"grad_norm": 0.210649773478508, |
|
"learning_rate": 0.00019916201012264254, |
|
"loss": 2.1052, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.10402855685874554, |
|
"grad_norm": 0.19624340534210205, |
|
"learning_rate": 0.00019911964923787295, |
|
"loss": 2.2474, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.10606833248342683, |
|
"grad_norm": 0.2529032230377197, |
|
"learning_rate": 0.0001990762486348855, |
|
"loss": 1.9782, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.10810810810810811, |
|
"grad_norm": 0.2078029066324234, |
|
"learning_rate": 0.00019903180876893194, |
|
"loss": 2.3627, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.11014788373278939, |
|
"grad_norm": 0.23153568804264069, |
|
"learning_rate": 0.00019898633010616542, |
|
"loss": 1.9749, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.11218765935747067, |
|
"grad_norm": 0.20798815786838531, |
|
"learning_rate": 0.00019893981312363562, |
|
"loss": 2.111, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.11422743498215196, |
|
"grad_norm": 0.20742167532444, |
|
"learning_rate": 0.00019889225830928365, |
|
"loss": 2.113, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.11626721060683325, |
|
"grad_norm": 0.21235893666744232, |
|
"learning_rate": 0.00019884366616193706, |
|
"loss": 2.0307, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.11830698623151453, |
|
"grad_norm": 0.19754983484745026, |
|
"learning_rate": 0.0001987940371913044, |
|
"loss": 1.9883, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.12034676185619582, |
|
"grad_norm": 0.20224173367023468, |
|
"learning_rate": 0.0001987433719179702, |
|
"loss": 2.0299, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1223865374808771, |
|
"grad_norm": 0.20431111752986908, |
|
"learning_rate": 0.00019869167087338907, |
|
"loss": 2.0, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.12442631310555839, |
|
"grad_norm": 0.21355204284191132, |
|
"learning_rate": 0.00019863893459988062, |
|
"loss": 1.9365, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.12646608873023968, |
|
"grad_norm": 0.2394651174545288, |
|
"learning_rate": 0.00019858516365062334, |
|
"loss": 1.9264, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.12850586435492095, |
|
"grad_norm": 0.20690159499645233, |
|
"learning_rate": 0.00019853035858964906, |
|
"loss": 1.9777, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.13054563997960225, |
|
"grad_norm": 0.21029432117938995, |
|
"learning_rate": 0.00019847451999183694, |
|
"loss": 2.2063, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.13258541560428352, |
|
"grad_norm": 0.2483580857515335, |
|
"learning_rate": 0.00019841764844290744, |
|
"loss": 1.9817, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.13462519122896482, |
|
"grad_norm": 0.2200578898191452, |
|
"learning_rate": 0.0001983597445394162, |
|
"loss": 1.8286, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.1366649668536461, |
|
"grad_norm": 0.23194506764411926, |
|
"learning_rate": 0.00019830080888874778, |
|
"loss": 1.8918, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.1387047424783274, |
|
"grad_norm": 0.2092316597700119, |
|
"learning_rate": 0.00019824084210910925, |
|
"loss": 2.0326, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.14074451810300867, |
|
"grad_norm": 0.226849764585495, |
|
"learning_rate": 0.00019817984482952376, |
|
"loss": 2.1387, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.14278429372768994, |
|
"grad_norm": 0.25519493222236633, |
|
"learning_rate": 0.0001981178176898239, |
|
"loss": 1.9261, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.14482406935237124, |
|
"grad_norm": 0.2155541330575943, |
|
"learning_rate": 0.00019805476134064507, |
|
"loss": 2.2176, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.14686384497705252, |
|
"grad_norm": 0.23612210154533386, |
|
"learning_rate": 0.00019799067644341844, |
|
"loss": 2.1125, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.14890362060173382, |
|
"grad_norm": 0.2563931941986084, |
|
"learning_rate": 0.00019792556367036432, |
|
"loss": 1.9459, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.1509433962264151, |
|
"grad_norm": 0.34834709763526917, |
|
"learning_rate": 0.0001978594237044849, |
|
"loss": 2.091, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.1529831718510964, |
|
"grad_norm": 0.221855029463768, |
|
"learning_rate": 0.00019779225723955707, |
|
"loss": 1.8867, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.15502294747577766, |
|
"grad_norm": 0.2613975405693054, |
|
"learning_rate": 0.0001977240649801253, |
|
"loss": 1.9889, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.15706272310045896, |
|
"grad_norm": 0.3117937743663788, |
|
"learning_rate": 0.00019765484764149415, |
|
"loss": 2.0743, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.15910249872514023, |
|
"grad_norm": 0.23428964614868164, |
|
"learning_rate": 0.00019758460594972068, |
|
"loss": 2.1752, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.1611422743498215, |
|
"grad_norm": 0.23182815313339233, |
|
"learning_rate": 0.00019751334064160706, |
|
"loss": 1.9315, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1631820499745028, |
|
"grad_norm": 0.3055015206336975, |
|
"learning_rate": 0.00019744105246469263, |
|
"loss": 2.0348, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.16522182559918408, |
|
"grad_norm": 0.22985045611858368, |
|
"learning_rate": 0.00019736774217724614, |
|
"loss": 1.792, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.16726160122386538, |
|
"grad_norm": 0.2585189938545227, |
|
"learning_rate": 0.00019729341054825782, |
|
"loss": 2.0945, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.16930137684854665, |
|
"grad_norm": 0.2798707187175751, |
|
"learning_rate": 0.00019721805835743134, |
|
"loss": 2.0114, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.17134115247322795, |
|
"grad_norm": 0.2886582016944885, |
|
"learning_rate": 0.00019714168639517544, |
|
"loss": 2.0575, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.17338092809790923, |
|
"grad_norm": 0.2944013178348541, |
|
"learning_rate": 0.00019706429546259593, |
|
"loss": 2.1074, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.17542070372259053, |
|
"grad_norm": 0.318002313375473, |
|
"learning_rate": 0.00019698588637148703, |
|
"loss": 1.9883, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.1774604793472718, |
|
"grad_norm": 0.256496787071228, |
|
"learning_rate": 0.00019690645994432305, |
|
"loss": 1.8408, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.17950025497195307, |
|
"grad_norm": 0.3593447208404541, |
|
"learning_rate": 0.0001968260170142496, |
|
"loss": 1.9313, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.18154003059663437, |
|
"grad_norm": 0.4645783305168152, |
|
"learning_rate": 0.00019674455842507492, |
|
"loss": 1.9448, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.18357980622131564, |
|
"grad_norm": 0.2802218198776245, |
|
"learning_rate": 0.00019666208503126112, |
|
"loss": 1.9091, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.18561958184599694, |
|
"grad_norm": 0.39699026942253113, |
|
"learning_rate": 0.00019657859769791505, |
|
"loss": 1.9936, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.18765935747067822, |
|
"grad_norm": 0.4515025317668915, |
|
"learning_rate": 0.00019649409730077935, |
|
"loss": 2.2269, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.18969913309535952, |
|
"grad_norm": 0.24775496125221252, |
|
"learning_rate": 0.00019640858472622316, |
|
"loss": 1.8843, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.1917389087200408, |
|
"grad_norm": 0.35014575719833374, |
|
"learning_rate": 0.00019632206087123296, |
|
"loss": 2.0159, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.1937786843447221, |
|
"grad_norm": 0.4293341040611267, |
|
"learning_rate": 0.00019623452664340306, |
|
"loss": 2.1129, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.19581845996940336, |
|
"grad_norm": 0.249364972114563, |
|
"learning_rate": 0.000196145982960926, |
|
"loss": 2.1385, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.19785823559408466, |
|
"grad_norm": 0.3796793222427368, |
|
"learning_rate": 0.00019605643075258321, |
|
"loss": 1.9737, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.19989801121876594, |
|
"grad_norm": 0.41410332918167114, |
|
"learning_rate": 0.00019596587095773495, |
|
"loss": 1.895, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.2019377868434472, |
|
"grad_norm": 0.2810049057006836, |
|
"learning_rate": 0.0001958743045263106, |
|
"loss": 1.9859, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.2039775624681285, |
|
"grad_norm": 0.37417009472846985, |
|
"learning_rate": 0.00019578173241879872, |
|
"loss": 1.9447, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.20601733809280978, |
|
"grad_norm": 0.33988890051841736, |
|
"learning_rate": 0.0001956881556062369, |
|
"loss": 1.8362, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.20805711371749108, |
|
"grad_norm": 0.27952101826667786, |
|
"learning_rate": 0.00019559357507020162, |
|
"loss": 1.9745, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.21009688934217235, |
|
"grad_norm": 0.24870562553405762, |
|
"learning_rate": 0.00019549799180279792, |
|
"loss": 1.8926, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.21213666496685366, |
|
"grad_norm": 0.33430561423301697, |
|
"learning_rate": 0.00019540140680664913, |
|
"loss": 2.0914, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.21417644059153493, |
|
"grad_norm": 0.3203825056552887, |
|
"learning_rate": 0.0001953038210948861, |
|
"loss": 2.0238, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.21621621621621623, |
|
"grad_norm": 0.2822887897491455, |
|
"learning_rate": 0.00019520523569113677, |
|
"loss": 2.053, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.2182559918408975, |
|
"grad_norm": 0.31055620312690735, |
|
"learning_rate": 0.00019510565162951537, |
|
"loss": 1.8936, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.22029576746557877, |
|
"grad_norm": 0.3920172452926636, |
|
"learning_rate": 0.0001950050699546116, |
|
"loss": 2.1061, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.22233554309026007, |
|
"grad_norm": 0.2737603187561035, |
|
"learning_rate": 0.00019490349172147963, |
|
"loss": 2.0772, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.22437531871494135, |
|
"grad_norm": 0.27526190876960754, |
|
"learning_rate": 0.00019480091799562704, |
|
"loss": 1.9078, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.22641509433962265, |
|
"grad_norm": 0.3213340938091278, |
|
"learning_rate": 0.00019469734985300371, |
|
"loss": 1.8632, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.22845486996430392, |
|
"grad_norm": 0.3409796953201294, |
|
"learning_rate": 0.00019459278837999046, |
|
"loss": 1.8264, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.23049464558898522, |
|
"grad_norm": 0.2637316882610321, |
|
"learning_rate": 0.00019448723467338763, |
|
"loss": 1.9274, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.2325344212136665, |
|
"grad_norm": 0.2738693952560425, |
|
"learning_rate": 0.00019438068984040365, |
|
"loss": 2.0925, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.2345741968383478, |
|
"grad_norm": 0.43626368045806885, |
|
"learning_rate": 0.00019427315499864344, |
|
"loss": 1.9424, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.23661397246302907, |
|
"grad_norm": 0.3207686245441437, |
|
"learning_rate": 0.00019416463127609656, |
|
"loss": 1.8188, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.23865374808771037, |
|
"grad_norm": 0.28913381695747375, |
|
"learning_rate": 0.0001940551198111255, |
|
"loss": 1.9561, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.24069352371239164, |
|
"grad_norm": 0.3558366894721985, |
|
"learning_rate": 0.00019394462175245381, |
|
"loss": 1.8839, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.2427332993370729, |
|
"grad_norm": 0.3230222463607788, |
|
"learning_rate": 0.0001938331382591537, |
|
"loss": 1.8197, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.2447730749617542, |
|
"grad_norm": 0.31508007645606995, |
|
"learning_rate": 0.00019372067050063438, |
|
"loss": 2.0963, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.24681285058643548, |
|
"grad_norm": 0.28743404150009155, |
|
"learning_rate": 0.00019360721965662933, |
|
"loss": 1.8382, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.24885262621111678, |
|
"grad_norm": 0.32556018233299255, |
|
"learning_rate": 0.00019349278691718427, |
|
"loss": 1.8255, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.2508924018357981, |
|
"grad_norm": 0.3947349488735199, |
|
"learning_rate": 0.00019337737348264447, |
|
"loss": 2.1048, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.25293217746047936, |
|
"grad_norm": 0.280627965927124, |
|
"learning_rate": 0.00019326098056364222, |
|
"loss": 1.7557, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.25497195308516063, |
|
"grad_norm": 0.34356269240379333, |
|
"learning_rate": 0.00019314360938108425, |
|
"loss": 1.9692, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.2570117287098419, |
|
"grad_norm": 0.30822035670280457, |
|
"learning_rate": 0.00019302526116613864, |
|
"loss": 1.9135, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.2590515043345232, |
|
"grad_norm": 0.28359460830688477, |
|
"learning_rate": 0.00019290593716022217, |
|
"loss": 2.0667, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.2610912799592045, |
|
"grad_norm": 0.2782716751098633, |
|
"learning_rate": 0.00019278563861498723, |
|
"loss": 1.7958, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.2631310555838858, |
|
"grad_norm": 0.2571290135383606, |
|
"learning_rate": 0.00019266436679230865, |
|
"loss": 2.1198, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.26517083120856705, |
|
"grad_norm": 0.3336668908596039, |
|
"learning_rate": 0.00019254212296427044, |
|
"loss": 1.8137, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.2672106068332483, |
|
"grad_norm": 0.23998981714248657, |
|
"learning_rate": 0.00019241890841315248, |
|
"loss": 2.0071, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.26925038245792965, |
|
"grad_norm": 0.3191507160663605, |
|
"learning_rate": 0.0001922947244314172, |
|
"loss": 2.0239, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.2712901580826109, |
|
"grad_norm": 0.30696266889572144, |
|
"learning_rate": 0.0001921695723216957, |
|
"loss": 2.2377, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.2733299337072922, |
|
"grad_norm": 0.2688175439834595, |
|
"learning_rate": 0.00019204345339677442, |
|
"loss": 1.8686, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.27536970933197347, |
|
"grad_norm": 0.27408865094184875, |
|
"learning_rate": 0.00019191636897958122, |
|
"loss": 2.0253, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.2774094849566548, |
|
"grad_norm": 0.2917419672012329, |
|
"learning_rate": 0.00019178832040317155, |
|
"loss": 1.905, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.27944926058133607, |
|
"grad_norm": 0.27972346544265747, |
|
"learning_rate": 0.0001916593090107143, |
|
"loss": 1.8001, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.28148903620601734, |
|
"grad_norm": 0.2744503915309906, |
|
"learning_rate": 0.00019152933615547798, |
|
"loss": 1.9057, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.2835288118306986, |
|
"grad_norm": 0.27640461921691895, |
|
"learning_rate": 0.0001913984032008163, |
|
"loss": 2.0683, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.2855685874553799, |
|
"grad_norm": 0.28741398453712463, |
|
"learning_rate": 0.00019126651152015403, |
|
"loss": 1.9567, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.2876083630800612, |
|
"grad_norm": 0.31334587931632996, |
|
"learning_rate": 0.0001911336624969725, |
|
"loss": 1.8974, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.2896481387047425, |
|
"grad_norm": 0.2697933614253998, |
|
"learning_rate": 0.00019099985752479506, |
|
"loss": 1.8786, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.29168791432942376, |
|
"grad_norm": 0.2917918562889099, |
|
"learning_rate": 0.00019086509800717258, |
|
"loss": 2.0443, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.29372768995410503, |
|
"grad_norm": 0.2908966541290283, |
|
"learning_rate": 0.00019072938535766865, |
|
"loss": 1.9817, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.29576746557878636, |
|
"grad_norm": 0.2819146513938904, |
|
"learning_rate": 0.0001905927209998447, |
|
"loss": 2.0689, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.29780724120346763, |
|
"grad_norm": 0.3112149238586426, |
|
"learning_rate": 0.0001904551063672452, |
|
"loss": 1.9045, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.2998470168281489, |
|
"grad_norm": 0.2511192560195923, |
|
"learning_rate": 0.00019031654290338254, |
|
"loss": 2.007, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.3018867924528302, |
|
"grad_norm": 0.2944158911705017, |
|
"learning_rate": 0.00019017703206172185, |
|
"loss": 1.9059, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.30392656807751145, |
|
"grad_norm": 0.2799970209598541, |
|
"learning_rate": 0.0001900365753056659, |
|
"loss": 1.8268, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.3059663437021928, |
|
"grad_norm": 0.3119431436061859, |
|
"learning_rate": 0.00018989517410853955, |
|
"loss": 1.7999, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.30800611932687405, |
|
"grad_norm": 0.27174267172813416, |
|
"learning_rate": 0.00018975282995357446, |
|
"loss": 1.8971, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.3100458949515553, |
|
"grad_norm": 0.3147140443325043, |
|
"learning_rate": 0.00018960954433389345, |
|
"loss": 1.9755, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.3120856705762366, |
|
"grad_norm": 0.27466344833374023, |
|
"learning_rate": 0.00018946531875249493, |
|
"loss": 1.9819, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.3141254462009179, |
|
"grad_norm": 0.2912920117378235, |
|
"learning_rate": 0.00018932015472223693, |
|
"loss": 2.1079, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.3161652218255992, |
|
"grad_norm": 0.3123255968093872, |
|
"learning_rate": 0.00018917405376582145, |
|
"loss": 1.93, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.31820499745028047, |
|
"grad_norm": 0.3270852565765381, |
|
"learning_rate": 0.0001890270174157784, |
|
"loss": 1.8386, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.32024477307496174, |
|
"grad_norm": 0.30445969104766846, |
|
"learning_rate": 0.00018887904721444953, |
|
"loss": 2.1993, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.322284548699643, |
|
"grad_norm": 0.2974015474319458, |
|
"learning_rate": 0.00018873014471397224, |
|
"loss": 1.8965, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.32432432432432434, |
|
"grad_norm": 0.3236542046070099, |
|
"learning_rate": 0.00018858031147626325, |
|
"loss": 2.041, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.3263640999490056, |
|
"grad_norm": 0.2958833575248718, |
|
"learning_rate": 0.00018842954907300236, |
|
"loss": 1.8681, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.3284038755736869, |
|
"grad_norm": 0.27163368463516235, |
|
"learning_rate": 0.00018827785908561584, |
|
"loss": 1.9664, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.33044365119836816, |
|
"grad_norm": 0.2662605941295624, |
|
"learning_rate": 0.0001881252431052599, |
|
"loss": 1.9255, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.3324834268230495, |
|
"grad_norm": 0.2995011508464813, |
|
"learning_rate": 0.00018797170273280388, |
|
"loss": 2.1172, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.33452320244773076, |
|
"grad_norm": 0.2997836768627167, |
|
"learning_rate": 0.00018781723957881372, |
|
"loss": 1.9044, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.33656297807241203, |
|
"grad_norm": 0.2867211401462555, |
|
"learning_rate": 0.0001876618552635348, |
|
"loss": 1.8, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.3386027536970933, |
|
"grad_norm": 0.2972771227359772, |
|
"learning_rate": 0.000187505551416875, |
|
"loss": 1.7879, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.3406425293217746, |
|
"grad_norm": 0.27393755316734314, |
|
"learning_rate": 0.00018734832967838775, |
|
"loss": 2.1209, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.3426823049464559, |
|
"grad_norm": 0.31422799825668335, |
|
"learning_rate": 0.00018719019169725472, |
|
"loss": 1.9435, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.3447220805711372, |
|
"grad_norm": 0.3372005224227905, |
|
"learning_rate": 0.00018703113913226847, |
|
"loss": 2.0911, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.34676185619581845, |
|
"grad_norm": 0.29995280504226685, |
|
"learning_rate": 0.00018687117365181512, |
|
"loss": 1.9077, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.3488016318204997, |
|
"grad_norm": 0.2770789861679077, |
|
"learning_rate": 0.0001867102969338569, |
|
"loss": 1.7649, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.35084140744518105, |
|
"grad_norm": 0.2982628047466278, |
|
"learning_rate": 0.00018654851066591448, |
|
"loss": 2.0258, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.3528811830698623, |
|
"grad_norm": 0.2924087643623352, |
|
"learning_rate": 0.0001863858165450492, |
|
"loss": 1.9533, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.3549209586945436, |
|
"grad_norm": 0.29398518800735474, |
|
"learning_rate": 0.0001862222162778454, |
|
"loss": 1.8298, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.35696073431922487, |
|
"grad_norm": 0.34897300601005554, |
|
"learning_rate": 0.00018605771158039253, |
|
"loss": 1.7459, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.35900050994390614, |
|
"grad_norm": 0.33509624004364014, |
|
"learning_rate": 0.00018589230417826697, |
|
"loss": 1.8843, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.36104028556858747, |
|
"grad_norm": 0.30635756254196167, |
|
"learning_rate": 0.00018572599580651415, |
|
"loss": 2.0315, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.36308006119326874, |
|
"grad_norm": 0.3544027805328369, |
|
"learning_rate": 0.00018555878820963013, |
|
"loss": 1.9821, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.36511983681795, |
|
"grad_norm": 0.283241331577301, |
|
"learning_rate": 0.00018539068314154354, |
|
"loss": 1.7482, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.3671596124426313, |
|
"grad_norm": 0.2630005478858948, |
|
"learning_rate": 0.00018522168236559695, |
|
"loss": 1.887, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3691993880673126, |
|
"grad_norm": 0.2816585898399353, |
|
"learning_rate": 0.00018505178765452853, |
|
"loss": 1.8995, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.3712391636919939, |
|
"grad_norm": 0.30843281745910645, |
|
"learning_rate": 0.00018488100079045344, |
|
"loss": 1.943, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.37327893931667516, |
|
"grad_norm": 0.30149805545806885, |
|
"learning_rate": 0.00018470932356484508, |
|
"loss": 1.8764, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.37531871494135643, |
|
"grad_norm": 0.27851638197898865, |
|
"learning_rate": 0.00018453675777851627, |
|
"loss": 1.8901, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.37735849056603776, |
|
"grad_norm": 0.34025222063064575, |
|
"learning_rate": 0.00018436330524160047, |
|
"loss": 2.167, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.37939826619071904, |
|
"grad_norm": 0.3183801472187042, |
|
"learning_rate": 0.0001841889677735327, |
|
"loss": 1.9419, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.3814380418154003, |
|
"grad_norm": 0.3072781562805176, |
|
"learning_rate": 0.00018401374720303056, |
|
"loss": 1.9817, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.3834778174400816, |
|
"grad_norm": 0.2894771993160248, |
|
"learning_rate": 0.00018383764536807485, |
|
"loss": 2.114, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.38551759306476285, |
|
"grad_norm": 0.3198698163032532, |
|
"learning_rate": 0.0001836606641158905, |
|
"loss": 1.8609, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.3875573686894442, |
|
"grad_norm": 0.3462139964103699, |
|
"learning_rate": 0.00018348280530292713, |
|
"loss": 1.7753, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.38959714431412545, |
|
"grad_norm": 0.2793361246585846, |
|
"learning_rate": 0.00018330407079483952, |
|
"loss": 2.1784, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.3916369199388067, |
|
"grad_norm": 0.29519417881965637, |
|
"learning_rate": 0.0001831244624664681, |
|
"loss": 1.9627, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.393676695563488, |
|
"grad_norm": 0.3317961096763611, |
|
"learning_rate": 0.00018294398220181917, |
|
"loss": 1.7643, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.3957164711881693, |
|
"grad_norm": 0.3138796091079712, |
|
"learning_rate": 0.0001827626318940454, |
|
"loss": 1.792, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.3977562468128506, |
|
"grad_norm": 0.30172964930534363, |
|
"learning_rate": 0.00018258041344542566, |
|
"loss": 1.8828, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.3997960224375319, |
|
"grad_norm": 0.3473678529262543, |
|
"learning_rate": 0.00018239732876734527, |
|
"loss": 1.9373, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.40183579806221315, |
|
"grad_norm": 0.3263239562511444, |
|
"learning_rate": 0.00018221337978027583, |
|
"loss": 1.9933, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.4038755736868944, |
|
"grad_norm": 0.32656848430633545, |
|
"learning_rate": 0.00018202856841375518, |
|
"loss": 1.8501, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.40591534931157575, |
|
"grad_norm": 0.3206334412097931, |
|
"learning_rate": 0.00018184289660636715, |
|
"loss": 2.0026, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.407955124936257, |
|
"grad_norm": 0.31711164116859436, |
|
"learning_rate": 0.0001816563663057211, |
|
"loss": 1.9573, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.4099949005609383, |
|
"grad_norm": 0.2891688048839569, |
|
"learning_rate": 0.00018146897946843163, |
|
"loss": 1.939, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.41203467618561956, |
|
"grad_norm": 0.3304015100002289, |
|
"learning_rate": 0.000181280738060098, |
|
"loss": 1.874, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.4140744518103009, |
|
"grad_norm": 0.2902586758136749, |
|
"learning_rate": 0.0001810916440552835, |
|
"loss": 1.8516, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.41611422743498216, |
|
"grad_norm": 0.3066134452819824, |
|
"learning_rate": 0.00018090169943749476, |
|
"loss": 1.909, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.41815400305966344, |
|
"grad_norm": 0.2844925820827484, |
|
"learning_rate": 0.00018071090619916093, |
|
"loss": 1.8373, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.4201937786843447, |
|
"grad_norm": 0.2905077338218689, |
|
"learning_rate": 0.00018051926634161282, |
|
"loss": 1.8031, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.422233554309026, |
|
"grad_norm": 0.3110411763191223, |
|
"learning_rate": 0.00018032678187506187, |
|
"loss": 1.9915, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.4242733299337073, |
|
"grad_norm": 0.27634483575820923, |
|
"learning_rate": 0.00018013345481857903, |
|
"loss": 2.0072, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.4263131055583886, |
|
"grad_norm": 0.29069051146507263, |
|
"learning_rate": 0.0001799392872000736, |
|
"loss": 2.0632, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.42835288118306986, |
|
"grad_norm": 0.28966760635375977, |
|
"learning_rate": 0.00017974428105627208, |
|
"loss": 1.8184, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.43039265680775113, |
|
"grad_norm": 0.2768760621547699, |
|
"learning_rate": 0.00017954843843269664, |
|
"loss": 1.8344, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.43243243243243246, |
|
"grad_norm": 0.30277568101882935, |
|
"learning_rate": 0.0001793517613836437, |
|
"loss": 1.931, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.43447220805711373, |
|
"grad_norm": 0.30833378434181213, |
|
"learning_rate": 0.00017915425197216245, |
|
"loss": 1.8319, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.436511983681795, |
|
"grad_norm": 0.2517772316932678, |
|
"learning_rate": 0.00017895591227003315, |
|
"loss": 2.0335, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.4385517593064763, |
|
"grad_norm": 0.3051300346851349, |
|
"learning_rate": 0.00017875674435774547, |
|
"loss": 1.8523, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.44059153493115755, |
|
"grad_norm": 0.331875741481781, |
|
"learning_rate": 0.00017855675032447648, |
|
"loss": 1.902, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.4426313105558389, |
|
"grad_norm": 0.3102109134197235, |
|
"learning_rate": 0.00017835593226806903, |
|
"loss": 1.9391, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.44467108618052015, |
|
"grad_norm": 0.28581124544143677, |
|
"learning_rate": 0.00017815429229500946, |
|
"loss": 1.9595, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.4467108618052014, |
|
"grad_norm": 0.2874554693698883, |
|
"learning_rate": 0.00017795183252040567, |
|
"loss": 1.8683, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.4487506374298827, |
|
"grad_norm": 0.3131530284881592, |
|
"learning_rate": 0.00017774855506796496, |
|
"loss": 2.003, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.450790413054564, |
|
"grad_norm": 0.3006989359855652, |
|
"learning_rate": 0.0001775444620699715, |
|
"loss": 1.8058, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.4528301886792453, |
|
"grad_norm": 0.29621464014053345, |
|
"learning_rate": 0.0001773395556672644, |
|
"loss": 1.9781, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.45486996430392657, |
|
"grad_norm": 0.325095534324646, |
|
"learning_rate": 0.00017713383800921478, |
|
"loss": 1.8633, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.45690973992860784, |
|
"grad_norm": 0.306911826133728, |
|
"learning_rate": 0.00017692731125370354, |
|
"loss": 1.9318, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.4589495155532891, |
|
"grad_norm": 0.335014283657074, |
|
"learning_rate": 0.00017671997756709863, |
|
"loss": 1.7637, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.46098929117797044, |
|
"grad_norm": 0.28670719265937805, |
|
"learning_rate": 0.00017651183912423228, |
|
"loss": 1.988, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.4630290668026517, |
|
"grad_norm": 0.2969343364238739, |
|
"learning_rate": 0.00017630289810837834, |
|
"loss": 1.7984, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.465068842427333, |
|
"grad_norm": 0.3015127182006836, |
|
"learning_rate": 0.0001760931567112291, |
|
"loss": 1.7022, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.46710861805201426, |
|
"grad_norm": 0.32308632135391235, |
|
"learning_rate": 0.00017588261713287267, |
|
"loss": 1.8067, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.4691483936766956, |
|
"grad_norm": 0.30818650126457214, |
|
"learning_rate": 0.00017567128158176953, |
|
"loss": 1.7096, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.47118816930137686, |
|
"grad_norm": 0.28488847613334656, |
|
"learning_rate": 0.00017545915227472965, |
|
"loss": 1.8784, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.47322794492605813, |
|
"grad_norm": 0.3801325857639313, |
|
"learning_rate": 0.00017524623143688902, |
|
"loss": 1.9555, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.4752677205507394, |
|
"grad_norm": 0.31661075353622437, |
|
"learning_rate": 0.00017503252130168657, |
|
"loss": 1.8717, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.47730749617542073, |
|
"grad_norm": 0.296003520488739, |
|
"learning_rate": 0.00017481802411084042, |
|
"loss": 1.7293, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.479347271800102, |
|
"grad_norm": 0.3210139274597168, |
|
"learning_rate": 0.0001746027421143246, |
|
"loss": 1.9587, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.4813870474247833, |
|
"grad_norm": 0.2968738079071045, |
|
"learning_rate": 0.00017438667757034546, |
|
"loss": 2.0743, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.48342682304946455, |
|
"grad_norm": 0.3373945355415344, |
|
"learning_rate": 0.00017416983274531775, |
|
"loss": 1.8239, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.4854665986741458, |
|
"grad_norm": 0.29180705547332764, |
|
"learning_rate": 0.0001739522099138411, |
|
"loss": 1.9563, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.48750637429882715, |
|
"grad_norm": 0.342316597700119, |
|
"learning_rate": 0.00017373381135867604, |
|
"loss": 1.9012, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.4895461499235084, |
|
"grad_norm": 0.29859068989753723, |
|
"learning_rate": 0.00017351463937072004, |
|
"loss": 1.723, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.4915859255481897, |
|
"grad_norm": 0.32331928610801697, |
|
"learning_rate": 0.0001732946962489836, |
|
"loss": 1.9278, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.49362570117287097, |
|
"grad_norm": 0.3604521155357361, |
|
"learning_rate": 0.00017307398430056593, |
|
"loss": 1.9691, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.4956654767975523, |
|
"grad_norm": 0.30965346097946167, |
|
"learning_rate": 0.000172852505840631, |
|
"loss": 1.7225, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.49770525242223357, |
|
"grad_norm": 0.2769593894481659, |
|
"learning_rate": 0.00017263026319238301, |
|
"loss": 1.7938, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.49974502804691484, |
|
"grad_norm": 0.33169788122177124, |
|
"learning_rate": 0.00017240725868704218, |
|
"loss": 1.6576, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.49974502804691484, |
|
"eval_loss": 1.874290943145752, |
|
"eval_runtime": 49.8203, |
|
"eval_samples_per_second": 16.58, |
|
"eval_steps_per_second": 2.088, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.5017848036715962, |
|
"grad_norm": 0.3088040053844452, |
|
"learning_rate": 0.00017218349466382023, |
|
"loss": 1.7977, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.5038245792962774, |
|
"grad_norm": 0.3247360289096832, |
|
"learning_rate": 0.0001719589734698959, |
|
"loss": 1.9595, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.5058643549209587, |
|
"grad_norm": 0.30020302534103394, |
|
"learning_rate": 0.00017173369746039025, |
|
"loss": 1.7303, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.50790413054564, |
|
"grad_norm": 0.2880503833293915, |
|
"learning_rate": 0.00017150766899834204, |
|
"loss": 1.9203, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.5099439061703213, |
|
"grad_norm": 0.2775571048259735, |
|
"learning_rate": 0.00017128089045468294, |
|
"loss": 1.8347, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.5119836817950025, |
|
"grad_norm": 0.31517794728279114, |
|
"learning_rate": 0.00017105336420821247, |
|
"loss": 1.9258, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.5140234574196838, |
|
"grad_norm": 0.315845251083374, |
|
"learning_rate": 0.0001708250926455733, |
|
"loss": 1.6125, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.5160632330443651, |
|
"grad_norm": 0.3020716607570648, |
|
"learning_rate": 0.00017059607816122618, |
|
"loss": 2.036, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.5181030086690463, |
|
"grad_norm": 0.3116091787815094, |
|
"learning_rate": 0.00017036632315742462, |
|
"loss": 1.7882, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.5201427842937277, |
|
"grad_norm": 0.2845454216003418, |
|
"learning_rate": 0.00017013583004418993, |
|
"loss": 1.9165, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.522182559918409, |
|
"grad_norm": 0.3218507468700409, |
|
"learning_rate": 0.00016990460123928575, |
|
"loss": 1.9143, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.5242223355430903, |
|
"grad_norm": 0.32271113991737366, |
|
"learning_rate": 0.00016967263916819287, |
|
"loss": 1.7233, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.5262621111677716, |
|
"grad_norm": 0.32956361770629883, |
|
"learning_rate": 0.00016943994626408363, |
|
"loss": 2.1151, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.5283018867924528, |
|
"grad_norm": 0.309712678194046, |
|
"learning_rate": 0.0001692065249677965, |
|
"loss": 1.7285, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.5303416624171341, |
|
"grad_norm": 0.3099533021450043, |
|
"learning_rate": 0.00016897237772781044, |
|
"loss": 1.8461, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.5323814380418154, |
|
"grad_norm": 0.2932775914669037, |
|
"learning_rate": 0.00016873750700021915, |
|
"loss": 1.869, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.5344212136664966, |
|
"grad_norm": 0.2912542223930359, |
|
"learning_rate": 0.00016850191524870546, |
|
"loss": 1.7672, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.536460989291178, |
|
"grad_norm": 0.33142998814582825, |
|
"learning_rate": 0.00016826560494451537, |
|
"loss": 1.8723, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.5385007649158593, |
|
"grad_norm": 0.26213347911834717, |
|
"learning_rate": 0.00016802857856643215, |
|
"loss": 1.6971, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.5405405405405406, |
|
"grad_norm": 0.2854675352573395, |
|
"learning_rate": 0.00016779083860075033, |
|
"loss": 1.8517, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.5425803161652218, |
|
"grad_norm": 0.3256230354309082, |
|
"learning_rate": 0.00016755238754124965, |
|
"loss": 1.9159, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.5446200917899031, |
|
"grad_norm": 0.30096253752708435, |
|
"learning_rate": 0.00016731322788916892, |
|
"loss": 1.7229, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.5466598674145844, |
|
"grad_norm": 0.2952723801136017, |
|
"learning_rate": 0.00016707336215317968, |
|
"loss": 1.8191, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.5486996430392657, |
|
"grad_norm": 0.32182615995407104, |
|
"learning_rate": 0.00016683279284936004, |
|
"loss": 1.9188, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.5507394186639469, |
|
"grad_norm": 0.3065682351589203, |
|
"learning_rate": 0.00016659152250116812, |
|
"loss": 1.5902, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.5527791942886282, |
|
"grad_norm": 0.27141043543815613, |
|
"learning_rate": 0.00016634955363941574, |
|
"loss": 1.705, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.5548189699133096, |
|
"grad_norm": 0.3703926205635071, |
|
"learning_rate": 0.00016610688880224178, |
|
"loss": 1.9515, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.5568587455379909, |
|
"grad_norm": 0.3434322476387024, |
|
"learning_rate": 0.0001658635305350855, |
|
"loss": 1.6887, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.5588985211626721, |
|
"grad_norm": 0.2847291827201843, |
|
"learning_rate": 0.00016561948139065996, |
|
"loss": 2.0415, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.5609382967873534, |
|
"grad_norm": 0.27591603994369507, |
|
"learning_rate": 0.00016537474392892528, |
|
"loss": 1.8893, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.5629780724120347, |
|
"grad_norm": 0.32474270462989807, |
|
"learning_rate": 0.00016512932071706152, |
|
"loss": 1.8589, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.565017848036716, |
|
"grad_norm": 0.33138149976730347, |
|
"learning_rate": 0.0001648832143294422, |
|
"loss": 1.7978, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.5670576236613972, |
|
"grad_norm": 0.321053683757782, |
|
"learning_rate": 0.0001646364273476067, |
|
"loss": 1.911, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.5690973992860785, |
|
"grad_norm": 0.313310444355011, |
|
"learning_rate": 0.00016438896236023375, |
|
"loss": 1.7816, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.5711371749107598, |
|
"grad_norm": 0.2893736958503723, |
|
"learning_rate": 0.000164140821963114, |
|
"loss": 1.7197, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.5731769505354412, |
|
"grad_norm": 0.3043624758720398, |
|
"learning_rate": 0.00016389200875912278, |
|
"loss": 1.7415, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.5752167261601224, |
|
"grad_norm": 0.29861506819725037, |
|
"learning_rate": 0.00016364252535819282, |
|
"loss": 1.7317, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.5772565017848037, |
|
"grad_norm": 0.3631903827190399, |
|
"learning_rate": 0.000163392374377287, |
|
"loss": 1.8116, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.579296277409485, |
|
"grad_norm": 0.32387158274650574, |
|
"learning_rate": 0.00016314155844037074, |
|
"loss": 1.8652, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.5813360530341662, |
|
"grad_norm": 0.3442007899284363, |
|
"learning_rate": 0.00016289008017838445, |
|
"loss": 2.0466, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.5833758286588475, |
|
"grad_norm": 0.3226166367530823, |
|
"learning_rate": 0.0001626379422292162, |
|
"loss": 1.8803, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.5854156042835288, |
|
"grad_norm": 0.2765788733959198, |
|
"learning_rate": 0.00016238514723767374, |
|
"loss": 1.8048, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.5874553799082101, |
|
"grad_norm": 0.26808875799179077, |
|
"learning_rate": 0.0001621316978554569, |
|
"loss": 1.9358, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.5894951555328913, |
|
"grad_norm": 0.34248560667037964, |
|
"learning_rate": 0.00016187759674112973, |
|
"loss": 1.7614, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.5915349311575727, |
|
"grad_norm": 0.3485107123851776, |
|
"learning_rate": 0.00016162284656009274, |
|
"loss": 1.7487, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.593574706782254, |
|
"grad_norm": 0.31153398752212524, |
|
"learning_rate": 0.00016136744998455476, |
|
"loss": 2.0895, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.5956144824069353, |
|
"grad_norm": 0.3104468584060669, |
|
"learning_rate": 0.00016111140969350503, |
|
"loss": 1.6566, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.5976542580316165, |
|
"grad_norm": 0.32697245478630066, |
|
"learning_rate": 0.00016085472837268502, |
|
"loss": 1.7631, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.5996940336562978, |
|
"grad_norm": 0.3330870270729065, |
|
"learning_rate": 0.00016059740871456036, |
|
"loss": 1.6047, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.6017338092809791, |
|
"grad_norm": 0.3144790828227997, |
|
"learning_rate": 0.00016033945341829248, |
|
"loss": 1.5975, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.6037735849056604, |
|
"grad_norm": 0.32482099533081055, |
|
"learning_rate": 0.00016008086518971037, |
|
"loss": 1.9879, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.6058133605303416, |
|
"grad_norm": 0.320336252450943, |
|
"learning_rate": 0.0001598216467412822, |
|
"loss": 1.9723, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.6078531361550229, |
|
"grad_norm": 0.2934703528881073, |
|
"learning_rate": 0.00015956180079208682, |
|
"loss": 1.7771, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.6098929117797043, |
|
"grad_norm": 0.3081587851047516, |
|
"learning_rate": 0.0001593013300677853, |
|
"loss": 1.869, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.6119326874043856, |
|
"grad_norm": 0.30365437269210815, |
|
"learning_rate": 0.00015904023730059228, |
|
"loss": 1.795, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.6139724630290668, |
|
"grad_norm": 0.3306022882461548, |
|
"learning_rate": 0.00015877852522924732, |
|
"loss": 1.8786, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.6160122386537481, |
|
"grad_norm": 0.3081189692020416, |
|
"learning_rate": 0.00015851619659898623, |
|
"loss": 1.8389, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.6180520142784294, |
|
"grad_norm": 0.3126586973667145, |
|
"learning_rate": 0.00015825325416151222, |
|
"loss": 1.8404, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.6200917899031106, |
|
"grad_norm": 0.2832873463630676, |
|
"learning_rate": 0.000157989700674967, |
|
"loss": 1.9894, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.6221315655277919, |
|
"grad_norm": 0.3080177903175354, |
|
"learning_rate": 0.00015772553890390197, |
|
"loss": 1.9813, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.6241713411524732, |
|
"grad_norm": 0.30326464772224426, |
|
"learning_rate": 0.00015746077161924905, |
|
"loss": 1.7597, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.6262111167771545, |
|
"grad_norm": 0.3071492910385132, |
|
"learning_rate": 0.00015719540159829184, |
|
"loss": 1.8149, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.6282508924018358, |
|
"grad_norm": 0.3333245515823364, |
|
"learning_rate": 0.00015692943162463628, |
|
"loss": 1.9806, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.6302906680265171, |
|
"grad_norm": 0.2858702838420868, |
|
"learning_rate": 0.0001566628644881815, |
|
"loss": 1.7616, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.6323304436511984, |
|
"grad_norm": 0.27514535188674927, |
|
"learning_rate": 0.00015639570298509064, |
|
"loss": 1.7516, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.6343702192758797, |
|
"grad_norm": 0.29323917627334595, |
|
"learning_rate": 0.00015612794991776147, |
|
"loss": 1.8444, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.6364099949005609, |
|
"grad_norm": 0.3227652907371521, |
|
"learning_rate": 0.00015585960809479696, |
|
"loss": 1.8715, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.6384497705252422, |
|
"grad_norm": 0.2898751497268677, |
|
"learning_rate": 0.00015559068033097582, |
|
"loss": 1.9242, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.6404895461499235, |
|
"grad_norm": 0.2990049421787262, |
|
"learning_rate": 0.00015532116944722308, |
|
"loss": 1.7412, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.6425293217746048, |
|
"grad_norm": 0.28128162026405334, |
|
"learning_rate": 0.00015505107827058036, |
|
"loss": 1.7801, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.644569097399286, |
|
"grad_norm": 0.32478880882263184, |
|
"learning_rate": 0.0001547804096341763, |
|
"loss": 1.681, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.6466088730239674, |
|
"grad_norm": 0.31724509596824646, |
|
"learning_rate": 0.00015450916637719684, |
|
"loss": 1.9405, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.6486486486486487, |
|
"grad_norm": 0.36439332365989685, |
|
"learning_rate": 0.00015423735134485536, |
|
"loss": 1.7013, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.65068842427333, |
|
"grad_norm": 0.3085692226886749, |
|
"learning_rate": 0.00015396496738836292, |
|
"loss": 1.7641, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.6527281998980112, |
|
"grad_norm": 0.2774830758571625, |
|
"learning_rate": 0.0001536920173648984, |
|
"loss": 1.6727, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.6547679755226925, |
|
"grad_norm": 0.3212391138076782, |
|
"learning_rate": 0.0001534185041375783, |
|
"loss": 1.7221, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.6568077511473738, |
|
"grad_norm": 0.30237263441085815, |
|
"learning_rate": 0.00015314443057542703, |
|
"loss": 1.7456, |
|
"step": 322 |
|
}, |
|
{ |
      "epoch": 0.658847526772055,
      "grad_norm": 0.2800453305244446,
      "learning_rate": 0.00015286979955334652,
      "loss": 1.7978,
      "step": 323
    },
    {
      "epoch": 0.6608873023967363,
      "grad_norm": 0.28402194380760193,
      "learning_rate": 0.00015259461395208628,
      "loss": 1.8785,
      "step": 324
    },
    {
      "epoch": 0.6629270780214176,
      "grad_norm": 0.3377262353897095,
      "learning_rate": 0.000152318876658213,
      "loss": 1.7113,
      "step": 325
    },
    {
      "epoch": 0.664966853646099,
      "grad_norm": 0.376544326543808,
      "learning_rate": 0.00015204259056408046,
      "loss": 1.6237,
      "step": 326
    },
    {
      "epoch": 0.6670066292707802,
      "grad_norm": 0.3138863742351532,
      "learning_rate": 0.00015176575856779904,
      "loss": 1.8357,
      "step": 327
    },
    {
      "epoch": 0.6690464048954615,
      "grad_norm": 0.40277865529060364,
      "learning_rate": 0.00015148838357320537,
      "loss": 1.9038,
      "step": 328
    },
    {
      "epoch": 0.6710861805201428,
      "grad_norm": 0.2998698055744171,
      "learning_rate": 0.0001512104684898319,
      "loss": 1.7583,
      "step": 329
    },
    {
      "epoch": 0.6731259561448241,
      "grad_norm": 0.33020147681236267,
      "learning_rate": 0.00015093201623287631,
      "loss": 1.7446,
      "step": 330
    },
    {
      "epoch": 0.6751657317695053,
      "grad_norm": 0.3545725643634796,
      "learning_rate": 0.00015065302972317108,
      "loss": 1.8882,
      "step": 331
    },
    {
      "epoch": 0.6772055073941866,
      "grad_norm": 0.2989177107810974,
      "learning_rate": 0.00015037351188715265,
      "loss": 1.7751,
      "step": 332
    },
    {
      "epoch": 0.6792452830188679,
      "grad_norm": 0.3225865662097931,
      "learning_rate": 0.00015009346565683087,
      "loss": 1.8539,
      "step": 333
    },
    {
      "epoch": 0.6812850586435492,
      "grad_norm": 0.31957486271858215,
      "learning_rate": 0.00014981289396975817,
      "loss": 1.6078,
      "step": 334
    },
    {
      "epoch": 0.6833248342682305,
      "grad_norm": 0.323512464761734,
      "learning_rate": 0.00014953179976899878,
      "loss": 1.9539,
      "step": 335
    },
    {
      "epoch": 0.6853646098929118,
      "grad_norm": 0.3451762795448303,
      "learning_rate": 0.00014925018600309785,
      "loss": 1.8435,
      "step": 336
    },
    {
      "epoch": 0.6874043855175931,
      "grad_norm": 0.3474057614803314,
      "learning_rate": 0.0001489680556260505,
      "loss": 1.8385,
      "step": 337
    },
    {
      "epoch": 0.6894441611422744,
      "grad_norm": 0.3119589686393738,
      "learning_rate": 0.00014868541159727096,
      "loss": 1.7582,
      "step": 338
    },
    {
      "epoch": 0.6914839367669556,
      "grad_norm": 0.3289264738559723,
      "learning_rate": 0.0001484022568815613,
      "loss": 1.8391,
      "step": 339
    },
    {
      "epoch": 0.6935237123916369,
      "grad_norm": 0.32644519209861755,
      "learning_rate": 0.00014811859444908052,
      "loss": 1.7676,
      "step": 340
    },
    {
      "epoch": 0.6955634880163182,
      "grad_norm": 0.2529926002025604,
      "learning_rate": 0.00014783442727531328,
      "loss": 1.6483,
      "step": 341
    },
    {
      "epoch": 0.6976032636409994,
      "grad_norm": 0.39409515261650085,
      "learning_rate": 0.00014754975834103877,
      "loss": 1.8951,
      "step": 342
    },
    {
      "epoch": 0.6996430392656807,
      "grad_norm": 0.312379390001297,
      "learning_rate": 0.00014726459063229945,
      "loss": 1.8388,
      "step": 343
    },
    {
      "epoch": 0.7016828148903621,
      "grad_norm": 0.3048081696033478,
      "learning_rate": 0.00014697892714036958,
      "loss": 1.9482,
      "step": 344
    },
    {
      "epoch": 0.7037225905150434,
      "grad_norm": 0.37214845418930054,
      "learning_rate": 0.00014669277086172406,
      "loss": 1.8466,
      "step": 345
    },
    {
      "epoch": 0.7057623661397247,
      "grad_norm": 0.3273617923259735,
      "learning_rate": 0.00014640612479800686,
      "loss": 1.5934,
      "step": 346
    },
    {
      "epoch": 0.7078021417644059,
      "grad_norm": 0.3283670246601105,
      "learning_rate": 0.00014611899195599953,
      "loss": 1.7837,
      "step": 347
    },
    {
      "epoch": 0.7098419173890872,
      "grad_norm": 0.38148233294487,
      "learning_rate": 0.00014583137534758967,
      "loss": 1.8952,
      "step": 348
    },
    {
      "epoch": 0.7118816930137685,
      "grad_norm": 0.35573315620422363,
      "learning_rate": 0.0001455432779897395,
      "loss": 1.6698,
      "step": 349
    },
    {
      "epoch": 0.7139214686384497,
      "grad_norm": 0.362224280834198,
      "learning_rate": 0.00014525470290445392,
      "loss": 1.7698,
      "step": 350
    },
    {
      "epoch": 0.715961244263131,
      "grad_norm": 0.3312818706035614,
      "learning_rate": 0.00014496565311874902,
      "loss": 1.7951,
      "step": 351
    },
    {
      "epoch": 0.7180010198878123,
      "grad_norm": 0.3737257719039917,
      "learning_rate": 0.00014467613166462023,
      "loss": 1.9562,
      "step": 352
    },
    {
      "epoch": 0.7200407955124937,
      "grad_norm": 0.37132444977760315,
      "learning_rate": 0.0001443861415790107,
      "loss": 1.9022,
      "step": 353
    },
    {
      "epoch": 0.7220805711371749,
      "grad_norm": 0.30398017168045044,
      "learning_rate": 0.00014409568590377918,
      "loss": 2.0578,
      "step": 354
    },
    {
      "epoch": 0.7241203467618562,
      "grad_norm": 0.3135487139225006,
      "learning_rate": 0.00014380476768566824,
      "loss": 1.777,
      "step": 355
    },
    {
      "epoch": 0.7261601223865375,
      "grad_norm": 0.3089943826198578,
      "learning_rate": 0.00014351338997627234,
      "loss": 1.638,
      "step": 356
    },
    {
      "epoch": 0.7281998980112188,
      "grad_norm": 0.2791310250759125,
      "learning_rate": 0.00014322155583200576,
      "loss": 1.7786,
      "step": 357
    },
    {
      "epoch": 0.7302396736359,
      "grad_norm": 0.30986955761909485,
      "learning_rate": 0.00014292926831407061,
      "loss": 1.7467,
      "step": 358
    },
    {
      "epoch": 0.7322794492605813,
      "grad_norm": 0.29783159494400024,
      "learning_rate": 0.0001426365304884246,
      "loss": 1.6486,
      "step": 359
    },
    {
      "epoch": 0.7343192248852626,
      "grad_norm": 0.29579076170921326,
      "learning_rate": 0.00014234334542574906,
      "loss": 1.665,
      "step": 360
    },
    {
      "epoch": 0.7363590005099439,
      "grad_norm": 0.31017670035362244,
      "learning_rate": 0.00014204971620141647,
      "loss": 1.7287,
      "step": 361
    },
    {
      "epoch": 0.7383987761346252,
      "grad_norm": 0.34222882986068726,
      "learning_rate": 0.00014175564589545854,
      "loss": 1.7475,
      "step": 362
    },
    {
      "epoch": 0.7404385517593065,
      "grad_norm": 0.2956671714782715,
      "learning_rate": 0.00014146113759253362,
      "loss": 1.757,
      "step": 363
    },
    {
      "epoch": 0.7424783273839878,
      "grad_norm": 0.35265278816223145,
      "learning_rate": 0.0001411661943818944,
      "loss": 1.6731,
      "step": 364
    },
    {
      "epoch": 0.744518103008669,
      "grad_norm": 0.3606823682785034,
      "learning_rate": 0.00014087081935735564,
      "loss": 1.8684,
      "step": 365
    },
    {
      "epoch": 0.7465578786333503,
      "grad_norm": 0.27310827374458313,
      "learning_rate": 0.00014057501561726157,
      "loss": 1.7656,
      "step": 366
    },
    {
      "epoch": 0.7485976542580316,
      "grad_norm": 0.3064682185649872,
      "learning_rate": 0.0001402787862644534,
      "loss": 1.7646,
      "step": 367
    },
    {
      "epoch": 0.7506374298827129,
      "grad_norm": 0.40648531913757324,
      "learning_rate": 0.0001399821344062369,
      "loss": 1.672,
      "step": 368
    },
    {
      "epoch": 0.7526772055073941,
      "grad_norm": 0.28727683424949646,
      "learning_rate": 0.00013968506315434974,
      "loss": 1.5766,
      "step": 369
    },
    {
      "epoch": 0.7547169811320755,
      "grad_norm": 0.30638012290000916,
      "learning_rate": 0.00013938757562492873,
      "loss": 1.6456,
      "step": 370
    },
    {
      "epoch": 0.7567567567567568,
      "grad_norm": 0.3163774311542511,
      "learning_rate": 0.0001390896749384773,
      "loss": 1.8618,
      "step": 371
    },
    {
      "epoch": 0.7587965323814381,
      "grad_norm": 0.36798760294914246,
      "learning_rate": 0.00013879136421983266,
      "loss": 2.1013,
      "step": 372
    },
    {
      "epoch": 0.7608363080061193,
      "grad_norm": 0.30383849143981934,
      "learning_rate": 0.00013849264659813312,
      "loss": 1.7117,
      "step": 373
    },
    {
      "epoch": 0.7628760836308006,
      "grad_norm": 0.35814371705055237,
      "learning_rate": 0.0001381935252067852,
      "loss": 1.9741,
      "step": 374
    },
    {
      "epoch": 0.7649158592554819,
      "grad_norm": 0.29710692167282104,
      "learning_rate": 0.00013789400318343068,
      "loss": 1.5538,
      "step": 375
    },
    {
      "epoch": 0.7669556348801632,
      "grad_norm": 0.30160966515541077,
      "learning_rate": 0.0001375940836699139,
      "loss": 1.7604,
      "step": 376
    },
    {
      "epoch": 0.7689954105048444,
      "grad_norm": 0.28164952993392944,
      "learning_rate": 0.0001372937698122487,
      "loss": 1.736,
      "step": 377
    },
    {
      "epoch": 0.7710351861295257,
      "grad_norm": 0.293729692697525,
      "learning_rate": 0.0001369930647605852,
      "loss": 1.9099,
      "step": 378
    },
    {
      "epoch": 0.7730749617542071,
      "grad_norm": 0.28284940123558044,
      "learning_rate": 0.00013669197166917723,
      "loss": 1.7251,
      "step": 379
    },
    {
      "epoch": 0.7751147373788884,
      "grad_norm": 0.26181384921073914,
      "learning_rate": 0.00013639049369634876,
      "loss": 1.6381,
      "step": 380
    },
    {
      "epoch": 0.7771545130035696,
      "grad_norm": 0.3301055431365967,
      "learning_rate": 0.00013608863400446113,
      "loss": 1.998,
      "step": 381
    },
    {
      "epoch": 0.7791942886282509,
      "grad_norm": 0.3116731643676758,
      "learning_rate": 0.00013578639575987958,
      "loss": 1.8277,
      "step": 382
    },
    {
      "epoch": 0.7812340642529322,
      "grad_norm": 0.30362340807914734,
      "learning_rate": 0.0001354837821329404,
      "loss": 1.888,
      "step": 383
    },
    {
      "epoch": 0.7832738398776135,
      "grad_norm": 0.29050615429878235,
      "learning_rate": 0.00013518079629791724,
      "loss": 1.7116,
      "step": 384
    },
    {
      "epoch": 0.7853136155022947,
      "grad_norm": 0.29679572582244873,
      "learning_rate": 0.00013487744143298822,
      "loss": 1.6066,
      "step": 385
    },
    {
      "epoch": 0.787353391126976,
      "grad_norm": 0.3293478488922119,
      "learning_rate": 0.0001345737207202023,
      "loss": 1.7222,
      "step": 386
    },
    {
      "epoch": 0.7893931667516573,
      "grad_norm": 0.34148016571998596,
      "learning_rate": 0.000134269637345446,
      "loss": 1.7243,
      "step": 387
    },
    {
      "epoch": 0.7914329423763387,
      "grad_norm": 0.31480157375335693,
      "learning_rate": 0.00013396519449841005,
      "loss": 1.8812,
      "step": 388
    },
    {
      "epoch": 0.7934727180010199,
      "grad_norm": 0.2998044192790985,
      "learning_rate": 0.0001336603953725559,
      "loss": 1.9205,
      "step": 389
    },
    {
      "epoch": 0.7955124936257012,
      "grad_norm": 0.33707278966903687,
      "learning_rate": 0.00013335524316508208,
      "loss": 1.9246,
      "step": 390
    },
    {
      "epoch": 0.7975522692503825,
      "grad_norm": 0.33658990263938904,
      "learning_rate": 0.00013304974107689087,
      "loss": 1.849,
      "step": 391
    },
    {
      "epoch": 0.7995920448750637,
      "grad_norm": 0.2800214886665344,
      "learning_rate": 0.00013274389231255466,
      "loss": 1.7607,
      "step": 392
    },
    {
      "epoch": 0.801631820499745,
      "grad_norm": 0.29528602957725525,
      "learning_rate": 0.00013243770008028224,
      "loss": 1.6146,
      "step": 393
    },
    {
      "epoch": 0.8036715961244263,
      "grad_norm": 0.316123902797699,
      "learning_rate": 0.00013213116759188523,
      "loss": 1.7578,
      "step": 394
    },
    {
      "epoch": 0.8057113717491076,
      "grad_norm": 0.339609295129776,
      "learning_rate": 0.0001318242980627444,
      "loss": 1.6968,
      "step": 395
    },
    {
      "epoch": 0.8077511473737888,
      "grad_norm": 0.29195457696914673,
      "learning_rate": 0.00013151709471177588,
      "loss": 1.6553,
      "step": 396
    },
    {
      "epoch": 0.8097909229984702,
      "grad_norm": 0.3093162477016449,
      "learning_rate": 0.00013120956076139746,
      "loss": 1.6833,
      "step": 397
    },
    {
      "epoch": 0.8118306986231515,
      "grad_norm": 0.343049019575119,
      "learning_rate": 0.00013090169943749476,
      "loss": 1.717,
      "step": 398
    },
    {
      "epoch": 0.8138704742478328,
      "grad_norm": 0.30858704447746277,
      "learning_rate": 0.0001305935139693874,
      "loss": 1.5495,
      "step": 399
    },
    {
      "epoch": 0.815910249872514,
      "grad_norm": 0.28386467695236206,
      "learning_rate": 0.00013028500758979506,
      "loss": 1.8721,
      "step": 400
    },
    {
      "epoch": 0.8179500254971953,
      "grad_norm": 0.3261226713657379,
      "learning_rate": 0.00012997618353480377,
      "loss": 1.7956,
      "step": 401
    },
    {
      "epoch": 0.8199898011218766,
      "grad_norm": 0.3289451003074646,
      "learning_rate": 0.00012966704504383168,
      "loss": 1.7322,
      "step": 402
    },
    {
      "epoch": 0.8220295767465579,
      "grad_norm": 0.3098496198654175,
      "learning_rate": 0.00012935759535959528,
      "loss": 1.7224,
      "step": 403
    },
    {
      "epoch": 0.8240693523712391,
      "grad_norm": 0.2830435335636139,
      "learning_rate": 0.00012904783772807533,
      "loss": 1.7597,
      "step": 404
    },
    {
      "epoch": 0.8261091279959204,
      "grad_norm": 0.3040255010128021,
      "learning_rate": 0.00012873777539848283,
      "loss": 1.8111,
      "step": 405
    },
    {
      "epoch": 0.8281489036206018,
      "grad_norm": 0.30722421407699585,
      "learning_rate": 0.00012842741162322487,
      "loss": 1.6308,
      "step": 406
    },
    {
      "epoch": 0.8301886792452831,
      "grad_norm": 0.28365135192871094,
      "learning_rate": 0.00012811674965787056,
      "loss": 1.932,
      "step": 407
    },
    {
      "epoch": 0.8322284548699643,
      "grad_norm": 0.26628825068473816,
      "learning_rate": 0.00012780579276111702,
      "loss": 1.5047,
      "step": 408
    },
    {
      "epoch": 0.8342682304946456,
      "grad_norm": 0.3449050486087799,
      "learning_rate": 0.00012749454419475487,
      "loss": 1.9364,
      "step": 409
    },
    {
      "epoch": 0.8363080061193269,
      "grad_norm": 0.2961723208427429,
      "learning_rate": 0.0001271830072236343,
      "loss": 1.6638,
      "step": 410
    },
    {
      "epoch": 0.8383477817440081,
      "grad_norm": 0.2808418273925781,
      "learning_rate": 0.00012687118511563075,
      "loss": 1.6743,
      "step": 411
    },
    {
      "epoch": 0.8403875573686894,
      "grad_norm": 0.30065590143203735,
      "learning_rate": 0.0001265590811416105,
      "loss": 1.8681,
      "step": 412
    },
    {
      "epoch": 0.8424273329933707,
      "grad_norm": 0.311669260263443,
      "learning_rate": 0.0001262466985753967,
      "loss": 1.7563,
      "step": 413
    },
    {
      "epoch": 0.844467108618052,
      "grad_norm": 0.31335267424583435,
      "learning_rate": 0.0001259340406937345,
      "loss": 1.9591,
      "step": 414
    },
    {
      "epoch": 0.8465068842427333,
      "grad_norm": 0.306470662355423,
      "learning_rate": 0.00012562111077625722,
      "loss": 1.7165,
      "step": 415
    },
    {
      "epoch": 0.8485466598674146,
      "grad_norm": 0.278089314699173,
      "learning_rate": 0.00012530791210545162,
      "loss": 1.7441,
      "step": 416
    },
    {
      "epoch": 0.8505864354920959,
      "grad_norm": 0.28886333107948303,
      "learning_rate": 0.00012499444796662353,
      "loss": 1.7301,
      "step": 417
    },
    {
      "epoch": 0.8526262111167772,
      "grad_norm": 0.31261351704597473,
      "learning_rate": 0.0001246807216478634,
      "loss": 2.062,
      "step": 418
    },
    {
      "epoch": 0.8546659867414584,
      "grad_norm": 0.3184990882873535,
      "learning_rate": 0.00012436673644001197,
      "loss": 2.0039,
      "step": 419
    },
    {
      "epoch": 0.8567057623661397,
      "grad_norm": 0.28797486424446106,
      "learning_rate": 0.00012405249563662537,
      "loss": 1.5982,
      "step": 420
    },
    {
      "epoch": 0.858745537990821,
      "grad_norm": 0.32951340079307556,
      "learning_rate": 0.00012373800253394102,
      "loss": 1.6121,
      "step": 421
    },
    {
      "epoch": 0.8607853136155023,
      "grad_norm": 0.28868240118026733,
      "learning_rate": 0.00012342326043084266,
      "loss": 1.7456,
      "step": 422
    },
    {
      "epoch": 0.8628250892401835,
      "grad_norm": 0.3051716387271881,
      "learning_rate": 0.00012310827262882615,
      "loss": 1.7698,
      "step": 423
    },
    {
      "epoch": 0.8648648648648649,
      "grad_norm": 0.3326285183429718,
      "learning_rate": 0.00012279304243196436,
      "loss": 1.7834,
      "step": 424
    },
    {
      "epoch": 0.8669046404895462,
      "grad_norm": 0.3435324430465698,
      "learning_rate": 0.00012247757314687297,
      "loss": 1.7208,
      "step": 425
    },
    {
      "epoch": 0.8689444161142275,
      "grad_norm": 0.35181501507759094,
      "learning_rate": 0.00012216186808267546,
      "loss": 1.7401,
      "step": 426
    },
    {
      "epoch": 0.8709841917389087,
      "grad_norm": 0.30371034145355225,
      "learning_rate": 0.00012184593055096854,
      "loss": 1.655,
      "step": 427
    },
    {
      "epoch": 0.87302396736359,
      "grad_norm": 0.29926905035972595,
      "learning_rate": 0.0001215297638657875,
      "loss": 1.686,
      "step": 428
    },
    {
      "epoch": 0.8750637429882713,
      "grad_norm": 0.32623806595802307,
      "learning_rate": 0.0001212133713435712,
      "loss": 1.6556,
      "step": 429
    },
    {
      "epoch": 0.8771035186129525,
      "grad_norm": 0.32232004404067993,
      "learning_rate": 0.00012089675630312754,
      "loss": 1.7736,
      "step": 430
    },
    {
      "epoch": 0.8791432942376338,
      "grad_norm": 0.3392092287540436,
      "learning_rate": 0.00012057992206559837,
      "loss": 1.8754,
      "step": 431
    },
    {
      "epoch": 0.8811830698623151,
      "grad_norm": 0.34549203515052795,
      "learning_rate": 0.00012026287195442503,
      "loss": 1.7624,
      "step": 432
    },
    {
      "epoch": 0.8832228454869965,
      "grad_norm": 0.3612309694290161,
      "learning_rate": 0.00011994560929531309,
      "loss": 1.6408,
      "step": 433
    },
    {
      "epoch": 0.8852626211116777,
      "grad_norm": 0.30541735887527466,
      "learning_rate": 0.00011962813741619777,
      "loss": 1.6785,
      "step": 434
    },
    {
      "epoch": 0.887302396736359,
      "grad_norm": 0.2894171476364136,
      "learning_rate": 0.00011931045964720881,
      "loss": 1.6609,
      "step": 435
    },
    {
      "epoch": 0.8893421723610403,
      "grad_norm": 0.31241005659103394,
      "learning_rate": 0.0001189925793206357,
      "loss": 1.7421,
      "step": 436
    },
    {
      "epoch": 0.8913819479857216,
      "grad_norm": 0.36774688959121704,
      "learning_rate": 0.00011867449977089265,
      "loss": 1.902,
      "step": 437
    },
    {
      "epoch": 0.8934217236104028,
      "grad_norm": 0.3658794164657593,
      "learning_rate": 0.00011835622433448361,
      "loss": 1.9666,
      "step": 438
    },
    {
      "epoch": 0.8954614992350841,
      "grad_norm": 0.33153945207595825,
      "learning_rate": 0.00011803775634996734,
      "loss": 1.6772,
      "step": 439
    },
    {
      "epoch": 0.8975012748597654,
      "grad_norm": 0.34272125363349915,
      "learning_rate": 0.0001177190991579223,
      "loss": 1.7911,
      "step": 440
    },
    {
      "epoch": 0.8995410504844467,
      "grad_norm": 0.3852793276309967,
      "learning_rate": 0.00011740025610091159,
      "loss": 1.7358,
      "step": 441
    },
    {
      "epoch": 0.901580826109128,
      "grad_norm": 0.33693379163742065,
      "learning_rate": 0.00011708123052344804,
      "loss": 1.7078,
      "step": 442
    },
    {
      "epoch": 0.9036206017338093,
      "grad_norm": 0.35405832529067993,
      "learning_rate": 0.00011676202577195901,
      "loss": 1.6454,
      "step": 443
    },
    {
      "epoch": 0.9056603773584906,
      "grad_norm": 0.38638654351234436,
      "learning_rate": 0.0001164426451947513,
      "loss": 1.7611,
      "step": 444
    },
    {
      "epoch": 0.9077001529831719,
      "grad_norm": 0.3415047824382782,
      "learning_rate": 0.00011612309214197599,
      "loss": 1.6206,
      "step": 445
    },
    {
      "epoch": 0.9097399286078531,
      "grad_norm": 0.3022047281265259,
      "learning_rate": 0.00011580336996559343,
      "loss": 1.8017,
      "step": 446
    },
    {
      "epoch": 0.9117797042325344,
      "grad_norm": 0.3650202751159668,
      "learning_rate": 0.00011548348201933798,
      "loss": 1.6507,
      "step": 447
    },
    {
      "epoch": 0.9138194798572157,
      "grad_norm": 0.44297119975090027,
      "learning_rate": 0.00011516343165868279,
      "loss": 1.8607,
      "step": 448
    },
    {
      "epoch": 0.915859255481897,
      "grad_norm": 0.3508884906768799,
      "learning_rate": 0.00011484322224080472,
      "loss": 1.6549,
      "step": 449
    },
    {
      "epoch": 0.9178990311065782,
      "grad_norm": 0.31238260865211487,
      "learning_rate": 0.00011452285712454904,
      "loss": 1.603,
      "step": 450
    },
    {
      "epoch": 0.9199388067312596,
      "grad_norm": 0.36678579449653625,
      "learning_rate": 0.00011420233967039422,
      "loss": 1.7984,
      "step": 451
    },
    {
      "epoch": 0.9219785823559409,
      "grad_norm": 0.3467310965061188,
      "learning_rate": 0.00011388167324041669,
      "loss": 1.7691,
      "step": 452
    },
    {
      "epoch": 0.9240183579806222,
      "grad_norm": 0.2987074553966522,
      "learning_rate": 0.00011356086119825553,
      "loss": 1.617,
      "step": 453
    },
    {
      "epoch": 0.9260581336053034,
      "grad_norm": 0.2808781862258911,
      "learning_rate": 0.00011323990690907733,
      "loss": 1.6362,
      "step": 454
    },
    {
      "epoch": 0.9280979092299847,
      "grad_norm": 0.356692910194397,
      "learning_rate": 0.00011291881373954065,
      "loss": 1.7279,
      "step": 455
    },
    {
      "epoch": 0.930137684854666,
      "grad_norm": 0.3478505611419678,
      "learning_rate": 0.00011259758505776092,
      "loss": 1.6011,
      "step": 456
    },
    {
      "epoch": 0.9321774604793472,
      "grad_norm": 0.3229358196258545,
      "learning_rate": 0.00011227622423327502,
      "loss": 1.7874,
      "step": 457
    },
    {
      "epoch": 0.9342172361040285,
      "grad_norm": 0.30234581232070923,
      "learning_rate": 0.0001119547346370059,
      "loss": 1.6098,
      "step": 458
    },
    {
      "epoch": 0.9362570117287098,
      "grad_norm": 0.356154203414917,
      "learning_rate": 0.00011163311964122734,
      "loss": 1.8164,
      "step": 459
    },
    {
      "epoch": 0.9382967873533912,
      "grad_norm": 0.35306453704833984,
      "learning_rate": 0.00011131138261952845,
      "loss": 1.8833,
      "step": 460
    },
    {
      "epoch": 0.9403365629780724,
      "grad_norm": 0.29947206377983093,
      "learning_rate": 0.00011098952694677829,
      "loss": 1.6675,
      "step": 461
    },
    {
      "epoch": 0.9423763386027537,
      "grad_norm": 0.3290257155895233,
      "learning_rate": 0.00011066755599909064,
      "loss": 1.7463,
      "step": 462
    },
    {
      "epoch": 0.944416114227435,
      "grad_norm": 0.32332712411880493,
      "learning_rate": 0.00011034547315378838,
      "loss": 1.5748,
      "step": 463
    },
    {
      "epoch": 0.9464558898521163,
      "grad_norm": 0.40129172801971436,
      "learning_rate": 0.00011002328178936811,
      "loss": 1.6484,
      "step": 464
    },
    {
      "epoch": 0.9484956654767975,
      "grad_norm": 0.34289342164993286,
      "learning_rate": 0.00010970098528546481,
      "loss": 1.7533,
      "step": 465
    },
    {
      "epoch": 0.9505354411014788,
      "grad_norm": 0.30435630679130554,
      "learning_rate": 0.00010937858702281631,
      "loss": 1.8037,
      "step": 466
    },
    {
      "epoch": 0.9525752167261601,
      "grad_norm": 0.29155558347702026,
      "learning_rate": 0.00010905609038322779,
      "loss": 1.5628,
      "step": 467
    },
    {
      "epoch": 0.9546149923508415,
      "grad_norm": 0.31822967529296875,
      "learning_rate": 0.0001087334987495364,
      "loss": 1.7222,
      "step": 468
    },
    {
      "epoch": 0.9566547679755227,
      "grad_norm": 0.2868748605251312,
      "learning_rate": 0.00010841081550557578,
      "loss": 1.86,
      "step": 469
    },
    {
      "epoch": 0.958694543600204,
      "grad_norm": 0.3066910207271576,
      "learning_rate": 0.00010808804403614043,
      "loss": 1.7278,
      "step": 470
    },
    {
      "epoch": 0.9607343192248853,
      "grad_norm": 0.28570494055747986,
      "learning_rate": 0.00010776518772695034,
      "loss": 1.7328,
      "step": 471
    },
    {
      "epoch": 0.9627740948495666,
      "grad_norm": 0.28472283482551575,
      "learning_rate": 0.0001074422499646154,
      "loss": 1.7311,
      "step": 472
    },
    {
      "epoch": 0.9648138704742478,
      "grad_norm": 0.30987489223480225,
      "learning_rate": 0.00010711923413659995,
      "loss": 1.7628,
      "step": 473
    },
    {
      "epoch": 0.9668536460989291,
      "grad_norm": 0.32321545481681824,
      "learning_rate": 0.00010679614363118717,
      "loss": 1.8949,
      "step": 474
    },
    {
      "epoch": 0.9688934217236104,
      "grad_norm": 0.32413583993911743,
      "learning_rate": 0.00010647298183744359,
      "loss": 1.6379,
      "step": 475
    },
    {
      "epoch": 0.9709331973482916,
      "grad_norm": 0.3008846044540405,
      "learning_rate": 0.0001061497521451835,
      "loss": 1.7546,
      "step": 476
    },
    {
      "epoch": 0.972972972972973,
      "grad_norm": 0.3248610198497772,
      "learning_rate": 0.00010582645794493337,
      "loss": 1.8915,
      "step": 477
    },
    {
      "epoch": 0.9750127485976543,
      "grad_norm": 0.3182532489299774,
      "learning_rate": 0.00010550310262789649,
      "loss": 1.8771,
      "step": 478
    },
    {
      "epoch": 0.9770525242223356,
      "grad_norm": 0.317730188369751,
      "learning_rate": 0.00010517968958591705,
      "loss": 1.7348,
      "step": 479
    },
    {
      "epoch": 0.9790922998470168,
      "grad_norm": 0.3585616648197174,
      "learning_rate": 0.00010485622221144484,
      "loss": 1.6916,
      "step": 480
    },
    {
      "epoch": 0.9811320754716981,
      "grad_norm": 0.2927946150302887,
      "learning_rate": 0.00010453270389749957,
      "loss": 1.7284,
      "step": 481
    },
    {
      "epoch": 0.9831718510963794,
      "grad_norm": 0.2869007885456085,
      "learning_rate": 0.00010420913803763521,
      "loss": 1.7312,
      "step": 482
    },
    {
      "epoch": 0.9852116267210607,
      "grad_norm": 0.34274759888648987,
      "learning_rate": 0.00010388552802590462,
      "loss": 1.6405,
      "step": 483
    },
    {
      "epoch": 0.9872514023457419,
      "grad_norm": 0.3203391134738922,
      "learning_rate": 0.00010356187725682359,
      "loss": 1.666,
      "step": 484
    },
    {
      "epoch": 0.9892911779704232,
      "grad_norm": 0.3370297849178314,
      "learning_rate": 0.00010323818912533561,
      "loss": 1.757,
      "step": 485
    },
    {
      "epoch": 0.9913309535951046,
      "grad_norm": 0.3095087707042694,
      "learning_rate": 0.00010291446702677599,
      "loss": 1.7015,
      "step": 486
    },
    {
      "epoch": 0.9933707292197859,
      "grad_norm": 0.33249640464782715,
      "learning_rate": 0.00010259071435683636,
      "loss": 1.9363,
      "step": 487
    },
    {
      "epoch": 0.9954105048444671,
      "grad_norm": 0.3441324830055237,
      "learning_rate": 0.000102266934511529,
      "loss": 1.5975,
      "step": 488
    },
    {
      "epoch": 0.9974502804691484,
      "grad_norm": 0.31439101696014404,
      "learning_rate": 0.00010194313088715135,
      "loss": 1.7025,
      "step": 489
    },
    {
      "epoch": 0.9994900560938297,
      "grad_norm": 0.34762027859687805,
      "learning_rate": 0.00010161930688025017,
      "loss": 1.7399,
      "step": 490
    },
    {
      "epoch": 0.9994900560938297,
      "eval_loss": 1.752766489982605,
      "eval_runtime": 57.5155,
      "eval_samples_per_second": 14.361,
      "eval_steps_per_second": 1.808,
      "step": 490
    }
  ],
  "logging_steps": 1,
  "max_steps": 980,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 490,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.735357947750646e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}