{ |
|
"best_global_step": 1500, |
|
"best_metric": 0.7986094355583191, |
|
"best_model_checkpoint": "./biomistral-lora-finetuned/checkpoint-1500", |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 2772, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.010832769126607989, |
|
"grad_norm": 0.7727395296096802, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.889, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.021665538253215978, |
|
"grad_norm": 0.8008129596710205, |
|
"learning_rate": 3.8e-05, |
|
"loss": 0.8378, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03249830737982397, |
|
"grad_norm": 0.9147247076034546, |
|
"learning_rate": 5.8e-05, |
|
"loss": 0.8108, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.043331076506431955, |
|
"grad_norm": 0.8121607303619385, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 0.8597, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05416384563303995, |
|
"grad_norm": 1.0018593072891235, |
|
"learning_rate": 9.8e-05, |
|
"loss": 0.7486, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06499661475964794, |
|
"grad_norm": 1.2048218250274658, |
|
"learning_rate": 0.000118, |
|
"loss": 0.6825, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.07582938388625593, |
|
"grad_norm": 0.9863468408584595, |
|
"learning_rate": 0.000138, |
|
"loss": 0.6539, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08666215301286391, |
|
"grad_norm": 1.2911494970321655, |
|
"learning_rate": 0.00015800000000000002, |
|
"loss": 0.6198, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0974949221394719, |
|
"grad_norm": 1.159672737121582, |
|
"learning_rate": 0.00017800000000000002, |
|
"loss": 0.6222, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.1083276912660799, |
|
"grad_norm": 1.0924432277679443, |
|
"learning_rate": 0.00019800000000000002, |
|
"loss": 0.5923, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.11916046039268788, |
|
"grad_norm": 1.3423463106155396, |
|
"learning_rate": 0.00019932634730538925, |
|
"loss": 0.5548, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.12999322951929587, |
|
"grad_norm": 1.4929102659225464, |
|
"learning_rate": 0.00019857784431137723, |
|
"loss": 0.6701, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.14082599864590387, |
|
"grad_norm": 0.9462954998016357, |
|
"learning_rate": 0.00019782934131736527, |
|
"loss": 0.8675, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.15165876777251186, |
|
"grad_norm": 0.9912289977073669, |
|
"learning_rate": 0.0001970808383233533, |
|
"loss": 0.9074, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.16249153689911983, |
|
"grad_norm": 1.1070538759231567, |
|
"learning_rate": 0.00019633233532934132, |
|
"loss": 0.8755, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.17332430602572782, |
|
"grad_norm": 0.9465340375900269, |
|
"learning_rate": 0.00019558383233532936, |
|
"loss": 0.882, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.18415707515233581, |
|
"grad_norm": 0.8657329678535461, |
|
"learning_rate": 0.00019483532934131737, |
|
"loss": 0.8737, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1949898442789438, |
|
"grad_norm": 0.7293577790260315, |
|
"learning_rate": 0.0001940868263473054, |
|
"loss": 0.8473, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.2058226134055518, |
|
"grad_norm": 0.849353551864624, |
|
"learning_rate": 0.00019333832335329343, |
|
"loss": 0.9414, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.2166553825321598, |
|
"grad_norm": 0.7525314688682556, |
|
"learning_rate": 0.00019258982035928144, |
|
"loss": 0.8852, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.22748815165876776, |
|
"grad_norm": 1.0732208490371704, |
|
"learning_rate": 0.00019184131736526948, |
|
"loss": 0.8074, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.23832092078537576, |
|
"grad_norm": 0.8420374393463135, |
|
"learning_rate": 0.0001910928143712575, |
|
"loss": 0.9508, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.24915368991198375, |
|
"grad_norm": 0.8308244347572327, |
|
"learning_rate": 0.0001903443113772455, |
|
"loss": 0.8734, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.25998645903859174, |
|
"grad_norm": 0.9915153384208679, |
|
"learning_rate": 0.00018959580838323354, |
|
"loss": 0.8816, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.2708192281651997, |
|
"grad_norm": 4.8621978759765625, |
|
"learning_rate": 0.00018884730538922158, |
|
"loss": 0.8848, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.28165199729180773, |
|
"grad_norm": 0.7945590019226074, |
|
"learning_rate": 0.0001880988023952096, |
|
"loss": 0.8503, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.2924847664184157, |
|
"grad_norm": 0.7896672487258911, |
|
"learning_rate": 0.00018735029940119763, |
|
"loss": 0.8798, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.3033175355450237, |
|
"grad_norm": 0.8870701789855957, |
|
"learning_rate": 0.00018660179640718564, |
|
"loss": 0.9112, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3141503046716317, |
|
"grad_norm": 0.9003740549087524, |
|
"learning_rate": 0.00018585329341317365, |
|
"loss": 0.846, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.32498307379823965, |
|
"grad_norm": 0.7067676186561584, |
|
"learning_rate": 0.0001851047904191617, |
|
"loss": 0.8588, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.3358158429248477, |
|
"grad_norm": 0.9696246385574341, |
|
"learning_rate": 0.0001843562874251497, |
|
"loss": 0.8244, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.34664861205145564, |
|
"grad_norm": 0.9892609715461731, |
|
"learning_rate": 0.00018360778443113774, |
|
"loss": 0.8214, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.35748138117806366, |
|
"grad_norm": 0.822260856628418, |
|
"learning_rate": 0.00018285928143712575, |
|
"loss": 0.7977, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.36831415030467163, |
|
"grad_norm": 0.7743964791297913, |
|
"learning_rate": 0.00018211077844311376, |
|
"loss": 0.8002, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.3791469194312796, |
|
"grad_norm": 0.7090775370597839, |
|
"learning_rate": 0.0001813622754491018, |
|
"loss": 0.8192, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.3899796885578876, |
|
"grad_norm": 1.0970802307128906, |
|
"learning_rate": 0.00018061377245508984, |
|
"loss": 0.8516, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.4008124576844956, |
|
"grad_norm": 0.9633163213729858, |
|
"learning_rate": 0.00017986526946107785, |
|
"loss": 0.8414, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.4116452268111036, |
|
"grad_norm": 0.6846926808357239, |
|
"learning_rate": 0.00017911676646706587, |
|
"loss": 0.8187, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.42247799593771157, |
|
"grad_norm": 0.7262110710144043, |
|
"learning_rate": 0.0001783682634730539, |
|
"loss": 0.8572, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.4333107650643196, |
|
"grad_norm": 0.8537372350692749, |
|
"learning_rate": 0.00017761976047904192, |
|
"loss": 0.8286, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.44414353419092756, |
|
"grad_norm": 0.8860271573066711, |
|
"learning_rate": 0.00017687125748502996, |
|
"loss": 0.8416, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.4549763033175355, |
|
"grad_norm": 0.7984218597412109, |
|
"learning_rate": 0.000176122754491018, |
|
"loss": 0.8373, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.46580907244414355, |
|
"grad_norm": 0.8060943484306335, |
|
"learning_rate": 0.000175374251497006, |
|
"loss": 0.9165, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.4766418415707515, |
|
"grad_norm": 0.7871391177177429, |
|
"learning_rate": 0.00017462574850299402, |
|
"loss": 0.8276, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.48747461069735953, |
|
"grad_norm": 0.7732688784599304, |
|
"learning_rate": 0.00017387724550898203, |
|
"loss": 0.8346, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.4983073798239675, |
|
"grad_norm": 0.9314000606536865, |
|
"learning_rate": 0.00017312874251497007, |
|
"loss": 0.8291, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.5091401489505755, |
|
"grad_norm": 0.6721988916397095, |
|
"learning_rate": 0.0001723802395209581, |
|
"loss": 0.7091, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5199729180771835, |
|
"grad_norm": 0.825965940952301, |
|
"learning_rate": 0.00017163173652694612, |
|
"loss": 0.8934, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5308056872037915, |
|
"grad_norm": 0.8427668213844299, |
|
"learning_rate": 0.00017088323353293413, |
|
"loss": 0.7603, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.5416384563303994, |
|
"grad_norm": 1.0061259269714355, |
|
"learning_rate": 0.00017013473053892217, |
|
"loss": 0.8277, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.5416384563303994, |
|
"eval_loss": 0.8331602811813354, |
|
"eval_runtime": 355.9061, |
|
"eval_samples_per_second": 4.614, |
|
"eval_steps_per_second": 2.307, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.5524712254570074, |
|
"grad_norm": 0.8820628523826599, |
|
"learning_rate": 0.00016938622754491018, |
|
"loss": 0.8348, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.5633039945836155, |
|
"grad_norm": 0.8095284700393677, |
|
"learning_rate": 0.00016863772455089822, |
|
"loss": 0.9172, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.5741367637102234, |
|
"grad_norm": 0.6959540843963623, |
|
"learning_rate": 0.00016788922155688623, |
|
"loss": 0.838, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.5849695328368314, |
|
"grad_norm": 0.835831880569458, |
|
"learning_rate": 0.00016714071856287424, |
|
"loss": 0.8887, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5958023019634394, |
|
"grad_norm": 0.9289611577987671, |
|
"learning_rate": 0.00016639221556886228, |
|
"loss": 0.8514, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.6066350710900474, |
|
"grad_norm": 0.6904628872871399, |
|
"learning_rate": 0.00016564371257485032, |
|
"loss": 0.8645, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.6174678402166554, |
|
"grad_norm": 0.8879178762435913, |
|
"learning_rate": 0.00016489520958083833, |
|
"loss": 0.8201, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.6283006093432634, |
|
"grad_norm": 0.8411425948143005, |
|
"learning_rate": 0.00016414670658682637, |
|
"loss": 0.836, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.6391333784698714, |
|
"grad_norm": 0.8564555644989014, |
|
"learning_rate": 0.00016339820359281436, |
|
"loss": 0.7724, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.6499661475964793, |
|
"grad_norm": 0.8382830619812012, |
|
"learning_rate": 0.0001626497005988024, |
|
"loss": 0.7839, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.6607989167230873, |
|
"grad_norm": 0.7657437920570374, |
|
"learning_rate": 0.00016190119760479043, |
|
"loss": 0.7973, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.6716316858496953, |
|
"grad_norm": 0.7758445143699646, |
|
"learning_rate": 0.00016115269461077845, |
|
"loss": 0.8111, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.6824644549763034, |
|
"grad_norm": 1.0041533708572388, |
|
"learning_rate": 0.00016040419161676649, |
|
"loss": 0.8359, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.6932972241029113, |
|
"grad_norm": 0.9679577946662903, |
|
"learning_rate": 0.0001596556886227545, |
|
"loss": 0.8822, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.7041299932295193, |
|
"grad_norm": 0.8141391277313232, |
|
"learning_rate": 0.0001589071856287425, |
|
"loss": 0.8714, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.7149627623561273, |
|
"grad_norm": 0.7982810139656067, |
|
"learning_rate": 0.00015815868263473055, |
|
"loss": 0.856, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.7257955314827352, |
|
"grad_norm": 0.7932000160217285, |
|
"learning_rate": 0.00015741017964071859, |
|
"loss": 0.8405, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.7366283006093433, |
|
"grad_norm": 0.7269508242607117, |
|
"learning_rate": 0.0001566616766467066, |
|
"loss": 0.8371, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.7474610697359513, |
|
"grad_norm": 0.9001722931861877, |
|
"learning_rate": 0.0001559131736526946, |
|
"loss": 0.8305, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.7582938388625592, |
|
"grad_norm": 0.6795508861541748, |
|
"learning_rate": 0.00015516467065868262, |
|
"loss": 0.8324, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.7691266079891672, |
|
"grad_norm": 0.8868729472160339, |
|
"learning_rate": 0.00015441616766467066, |
|
"loss": 0.8521, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.7799593771157752, |
|
"grad_norm": 0.9720478653907776, |
|
"learning_rate": 0.0001536676646706587, |
|
"loss": 0.7759, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.7907921462423833, |
|
"grad_norm": 0.8006075620651245, |
|
"learning_rate": 0.0001529191616766467, |
|
"loss": 0.7981, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.8016249153689912, |
|
"grad_norm": 0.9107721447944641, |
|
"learning_rate": 0.00015217065868263475, |
|
"loss": 0.7868, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.8124576844955992, |
|
"grad_norm": 0.7584466338157654, |
|
"learning_rate": 0.00015142215568862276, |
|
"loss": 0.7401, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.8232904536222072, |
|
"grad_norm": 1.0075221061706543, |
|
"learning_rate": 0.00015067365269461077, |
|
"loss": 0.8024, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.8341232227488151, |
|
"grad_norm": 0.8769344091415405, |
|
"learning_rate": 0.0001499251497005988, |
|
"loss": 0.7779, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.8449559918754231, |
|
"grad_norm": 0.84312903881073, |
|
"learning_rate": 0.00014917664670658685, |
|
"loss": 0.8314, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.8557887610020312, |
|
"grad_norm": 0.8116353750228882, |
|
"learning_rate": 0.00014842814371257486, |
|
"loss": 0.8146, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.8666215301286392, |
|
"grad_norm": 0.8301011919975281, |
|
"learning_rate": 0.00014767964071856287, |
|
"loss": 0.7422, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.8774542992552471, |
|
"grad_norm": 0.8579692244529724, |
|
"learning_rate": 0.00014693113772455091, |
|
"loss": 0.7442, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.8882870683818551, |
|
"grad_norm": 0.7513943910598755, |
|
"learning_rate": 0.00014618263473053893, |
|
"loss": 0.7671, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.8991198375084631, |
|
"grad_norm": 0.9639107584953308, |
|
"learning_rate": 0.00014543413173652696, |
|
"loss": 0.7896, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.909952606635071, |
|
"grad_norm": 0.8897636532783508, |
|
"learning_rate": 0.00014468562874251498, |
|
"loss": 0.7613, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.9207853757616791, |
|
"grad_norm": 0.7998213171958923, |
|
"learning_rate": 0.000143937125748503, |
|
"loss": 0.7647, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.9316181448882871, |
|
"grad_norm": 0.6916050910949707, |
|
"learning_rate": 0.00014318862275449103, |
|
"loss": 0.7697, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.942450914014895, |
|
"grad_norm": 1.0154324769973755, |
|
"learning_rate": 0.00014244011976047904, |
|
"loss": 0.7314, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.953283683141503, |
|
"grad_norm": 0.9787517786026001, |
|
"learning_rate": 0.00014169161676646708, |
|
"loss": 0.8047, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.964116452268111, |
|
"grad_norm": 0.6035457253456116, |
|
"learning_rate": 0.00014094311377245512, |
|
"loss": 0.783, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.9749492213947191, |
|
"grad_norm": 0.940951943397522, |
|
"learning_rate": 0.0001401946107784431, |
|
"loss": 0.7741, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.985781990521327, |
|
"grad_norm": 0.7785654067993164, |
|
"learning_rate": 0.00013944610778443114, |
|
"loss": 0.7855, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.996614759647935, |
|
"grad_norm": 0.8356137275695801, |
|
"learning_rate": 0.00013869760479041918, |
|
"loss": 0.8292, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.0064996614759647, |
|
"grad_norm": 0.6590499877929688, |
|
"learning_rate": 0.0001379491017964072, |
|
"loss": 0.6858, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.0173324306025728, |
|
"grad_norm": 1.0389671325683594, |
|
"learning_rate": 0.00013720059880239523, |
|
"loss": 0.6097, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.0281651997291807, |
|
"grad_norm": 0.9596243500709534, |
|
"learning_rate": 0.00013645209580838324, |
|
"loss": 0.5676, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.0389979688557887, |
|
"grad_norm": 1.0831798315048218, |
|
"learning_rate": 0.00013570359281437125, |
|
"loss": 0.6106, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.0498307379823968, |
|
"grad_norm": 0.92978835105896, |
|
"learning_rate": 0.0001349550898203593, |
|
"loss": 0.5924, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.0606635071090047, |
|
"grad_norm": 0.9672062993049622, |
|
"learning_rate": 0.0001342065868263473, |
|
"loss": 0.5496, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.0714962762356128, |
|
"grad_norm": 1.1402652263641357, |
|
"learning_rate": 0.00013345808383233534, |
|
"loss": 0.5871, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.0823290453622207, |
|
"grad_norm": 1.1109035015106201, |
|
"learning_rate": 0.00013270958083832335, |
|
"loss": 0.5424, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.0823290453622207, |
|
"eval_loss": 0.8179630041122437, |
|
"eval_runtime": 357.2769, |
|
"eval_samples_per_second": 4.596, |
|
"eval_steps_per_second": 2.298, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.0931618144888287, |
|
"grad_norm": 0.8117087483406067, |
|
"learning_rate": 0.00013196107784431137, |
|
"loss": 0.5636, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.1039945836154368, |
|
"grad_norm": 0.86320561170578, |
|
"learning_rate": 0.0001312125748502994, |
|
"loss": 0.5191, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.1148273527420447, |
|
"grad_norm": 1.1274133920669556, |
|
"learning_rate": 0.00013046407185628744, |
|
"loss": 0.5891, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.1256601218686526, |
|
"grad_norm": 1.0116336345672607, |
|
"learning_rate": 0.00012971556886227546, |
|
"loss": 0.5579, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.1364928909952607, |
|
"grad_norm": 0.9277855157852173, |
|
"learning_rate": 0.0001289670658682635, |
|
"loss": 0.5971, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.1473256601218687, |
|
"grad_norm": 1.0700503587722778, |
|
"learning_rate": 0.0001282185628742515, |
|
"loss": 0.5815, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.1581584292484766, |
|
"grad_norm": 0.9346574544906616, |
|
"learning_rate": 0.00012747005988023952, |
|
"loss": 0.5472, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.1689911983750847, |
|
"grad_norm": 1.047631025314331, |
|
"learning_rate": 0.00012672155688622756, |
|
"loss": 0.5479, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.1798239675016926, |
|
"grad_norm": 0.9931487441062927, |
|
"learning_rate": 0.00012597305389221557, |
|
"loss": 0.5521, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.1906567366283005, |
|
"grad_norm": 0.9764857292175293, |
|
"learning_rate": 0.0001252245508982036, |
|
"loss": 0.584, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.2014895057549086, |
|
"grad_norm": 1.0661903619766235, |
|
"learning_rate": 0.00012447604790419162, |
|
"loss": 0.6101, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.2123222748815166, |
|
"grad_norm": 1.0962295532226562, |
|
"learning_rate": 0.00012372754491017963, |
|
"loss": 0.6028, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.2231550440081245, |
|
"grad_norm": 0.9794766306877136, |
|
"learning_rate": 0.00012297904191616767, |
|
"loss": 0.5813, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.2339878131347326, |
|
"grad_norm": 0.9556275606155396, |
|
"learning_rate": 0.0001222305389221557, |
|
"loss": 0.5662, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.2448205822613405, |
|
"grad_norm": 1.1200224161148071, |
|
"learning_rate": 0.0001214820359281437, |
|
"loss": 0.5642, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.2556533513879486, |
|
"grad_norm": 1.0518434047698975, |
|
"learning_rate": 0.00012073353293413175, |
|
"loss": 0.6126, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.2664861205145566, |
|
"grad_norm": 1.1709963083267212, |
|
"learning_rate": 0.00011998502994011977, |
|
"loss": 0.5189, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.2773188896411645, |
|
"grad_norm": 0.8867760896682739, |
|
"learning_rate": 0.00011923652694610778, |
|
"loss": 0.6098, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.2881516587677724, |
|
"grad_norm": 0.9317127466201782, |
|
"learning_rate": 0.00011848802395209582, |
|
"loss": 0.5667, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.2989844278943805, |
|
"grad_norm": 1.1382100582122803, |
|
"learning_rate": 0.00011773952095808385, |
|
"loss": 0.5756, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.3098171970209884, |
|
"grad_norm": 0.9819681644439697, |
|
"learning_rate": 0.00011699101796407186, |
|
"loss": 0.5922, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.3206499661475966, |
|
"grad_norm": 1.0776174068450928, |
|
"learning_rate": 0.00011624251497005988, |
|
"loss": 0.5728, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.3314827352742045, |
|
"grad_norm": 1.0137302875518799, |
|
"learning_rate": 0.0001154940119760479, |
|
"loss": 0.5603, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.3423155044008124, |
|
"grad_norm": 1.1223585605621338, |
|
"learning_rate": 0.00011474550898203593, |
|
"loss": 0.5639, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.3531482735274205, |
|
"grad_norm": 0.8942229747772217, |
|
"learning_rate": 0.00011399700598802396, |
|
"loss": 0.586, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.3639810426540284, |
|
"grad_norm": 1.225698709487915, |
|
"learning_rate": 0.00011324850299401197, |
|
"loss": 0.563, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.3748138117806366, |
|
"grad_norm": 1.159463882446289, |
|
"learning_rate": 0.00011250000000000001, |
|
"loss": 0.5898, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.3856465809072445, |
|
"grad_norm": 1.0059807300567627, |
|
"learning_rate": 0.00011175149700598804, |
|
"loss": 0.6096, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.3964793500338524, |
|
"grad_norm": 1.1433062553405762, |
|
"learning_rate": 0.00011100299401197605, |
|
"loss": 0.5411, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.4073121191604603, |
|
"grad_norm": 1.0282905101776123, |
|
"learning_rate": 0.00011025449101796407, |
|
"loss": 0.5928, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.4181448882870684, |
|
"grad_norm": 0.8389853835105896, |
|
"learning_rate": 0.00010950598802395211, |
|
"loss": 0.5657, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.4289776574136763, |
|
"grad_norm": 1.132350206375122, |
|
"learning_rate": 0.00010875748502994012, |
|
"loss": 0.6196, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.4398104265402845, |
|
"grad_norm": 1.1093621253967285, |
|
"learning_rate": 0.00010800898203592815, |
|
"loss": 0.5845, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.4506431956668924, |
|
"grad_norm": 1.3198816776275635, |
|
"learning_rate": 0.00010726047904191616, |
|
"loss": 0.5711, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.4614759647935003, |
|
"grad_norm": 0.8968690037727356, |
|
"learning_rate": 0.0001065119760479042, |
|
"loss": 0.6075, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.4723087339201082, |
|
"grad_norm": 1.0248963832855225, |
|
"learning_rate": 0.00010576347305389222, |
|
"loss": 0.5869, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.4831415030467163, |
|
"grad_norm": 1.2115412950515747, |
|
"learning_rate": 0.00010501497005988024, |
|
"loss": 0.549, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.4939742721733242, |
|
"grad_norm": 1.1320476531982422, |
|
"learning_rate": 0.00010426646706586826, |
|
"loss": 0.5661, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.5048070412999324, |
|
"grad_norm": 1.0099844932556152, |
|
"learning_rate": 0.0001035179640718563, |
|
"loss": 0.5953, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.5156398104265403, |
|
"grad_norm": 0.9809553623199463, |
|
"learning_rate": 0.00010276946107784431, |
|
"loss": 0.578, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.5264725795531482, |
|
"grad_norm": 1.4169446229934692, |
|
"learning_rate": 0.00010202095808383234, |
|
"loss": 0.6173, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.537305348679756, |
|
"grad_norm": 1.1033852100372314, |
|
"learning_rate": 0.00010127245508982038, |
|
"loss": 0.5917, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.5481381178063642, |
|
"grad_norm": 1.1163372993469238, |
|
"learning_rate": 0.00010052395209580839, |
|
"loss": 0.589, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.5589708869329724, |
|
"grad_norm": 0.9786676168441772, |
|
"learning_rate": 9.977544910179641e-05, |
|
"loss": 0.5425, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.5698036560595803, |
|
"grad_norm": 1.034001111984253, |
|
"learning_rate": 9.902694610778444e-05, |
|
"loss": 0.5467, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.5806364251861882, |
|
"grad_norm": 0.8697665929794312, |
|
"learning_rate": 9.827844311377245e-05, |
|
"loss": 0.5882, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.591469194312796, |
|
"grad_norm": 1.0091935396194458, |
|
"learning_rate": 9.752994011976049e-05, |
|
"loss": 0.573, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.6023019634394042, |
|
"grad_norm": 1.0126501321792603, |
|
"learning_rate": 9.678143712574852e-05, |
|
"loss": 0.6083, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.6131347325660121, |
|
"grad_norm": 0.9271785020828247, |
|
"learning_rate": 9.603293413173653e-05, |
|
"loss": 0.5564, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.6239675016926203, |
|
"grad_norm": 1.0736253261566162, |
|
"learning_rate": 9.528443113772455e-05, |
|
"loss": 0.5696, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.6239675016926203, |
|
"eval_loss": 0.7986094355583191, |
|
"eval_runtime": 358.2761, |
|
"eval_samples_per_second": 4.583, |
|
"eval_steps_per_second": 2.292, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.6348002708192282, |
|
"grad_norm": 0.9671568870544434, |
|
"learning_rate": 9.453592814371258e-05, |
|
"loss": 0.5994, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.645633039945836, |
|
"grad_norm": 0.9636701345443726, |
|
"learning_rate": 9.37874251497006e-05, |
|
"loss": 0.6096, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.656465809072444, |
|
"grad_norm": 1.1323844194412231, |
|
"learning_rate": 9.303892215568863e-05, |
|
"loss": 0.5981, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.6672985781990521, |
|
"grad_norm": 1.0002387762069702, |
|
"learning_rate": 9.229041916167665e-05, |
|
"loss": 0.5807, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.6781313473256603, |
|
"grad_norm": 1.2000038623809814, |
|
"learning_rate": 9.154191616766468e-05, |
|
"loss": 0.5583, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.6889641164522682, |
|
"grad_norm": 1.153903841972351, |
|
"learning_rate": 9.079341317365269e-05, |
|
"loss": 0.6237, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.699796885578876, |
|
"grad_norm": 1.0791847705841064, |
|
"learning_rate": 9.004491017964072e-05, |
|
"loss": 0.5457, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.710629654705484, |
|
"grad_norm": 1.1212618350982666, |
|
"learning_rate": 8.929640718562875e-05, |
|
"loss": 0.551, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.721462423832092, |
|
"grad_norm": 1.219691514968872, |
|
"learning_rate": 8.854790419161677e-05, |
|
"loss": 0.6027, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.7322951929587, |
|
"grad_norm": 1.066247820854187, |
|
"learning_rate": 8.779940119760479e-05, |
|
"loss": 0.5739, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.7431279620853082, |
|
"grad_norm": 1.070609450340271, |
|
"learning_rate": 8.705089820359282e-05, |
|
"loss": 0.6113, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.753960731211916, |
|
"grad_norm": 1.377456784248352, |
|
"learning_rate": 8.630239520958084e-05, |
|
"loss": 0.5648, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.764793500338524, |
|
"grad_norm": 1.0471181869506836, |
|
"learning_rate": 8.555389221556887e-05, |
|
"loss": 0.5514, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.775626269465132, |
|
"grad_norm": 1.2327128648757935, |
|
"learning_rate": 8.480538922155688e-05, |
|
"loss": 0.6072, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.78645903859174, |
|
"grad_norm": 1.004497766494751, |
|
"learning_rate": 8.405688622754492e-05, |
|
"loss": 0.5601, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.797291807718348, |
|
"grad_norm": 1.2862775325775146, |
|
"learning_rate": 8.330838323353294e-05, |
|
"loss": 0.6073, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.808124576844956, |
|
"grad_norm": 1.0752897262573242, |
|
"learning_rate": 8.255988023952096e-05, |
|
"loss": 0.6079, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.818957345971564, |
|
"grad_norm": 1.031568169593811, |
|
"learning_rate": 8.1811377245509e-05, |
|
"loss": 0.5701, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.829790115098172, |
|
"grad_norm": 1.2067883014678955, |
|
"learning_rate": 8.1062874251497e-05, |
|
"loss": 0.6024, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.8406228842247798, |
|
"grad_norm": 1.2873584032058716, |
|
"learning_rate": 8.031437125748503e-05, |
|
"loss": 0.6033, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.851455653351388, |
|
"grad_norm": 1.1230562925338745, |
|
"learning_rate": 7.956586826347306e-05, |
|
"loss": 0.5534, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.862288422477996, |
|
"grad_norm": 1.275429129600525, |
|
"learning_rate": 7.881736526946108e-05, |
|
"loss": 0.5483, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.873121191604604, |
|
"grad_norm": 1.1561681032180786, |
|
"learning_rate": 7.806886227544911e-05, |
|
"loss": 0.5948, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.883953960731212, |
|
"grad_norm": 1.0285365581512451, |
|
"learning_rate": 7.732035928143713e-05, |
|
"loss": 0.5843, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.8947867298578198, |
|
"grad_norm": 1.257944107055664, |
|
"learning_rate": 7.657185628742516e-05, |
|
"loss": 0.5672, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.9056194989844277, |
|
"grad_norm": 1.2069061994552612, |
|
"learning_rate": 7.582335329341318e-05, |
|
"loss": 0.6312, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.9164522681110359, |
|
"grad_norm": 0.946007251739502, |
|
"learning_rate": 7.50748502994012e-05, |
|
"loss": 0.6028, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.927285037237644, |
|
"grad_norm": 1.3141242265701294, |
|
"learning_rate": 7.432634730538922e-05, |
|
"loss": 0.5762, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.938117806364252, |
|
"grad_norm": 0.9737468957901001, |
|
"learning_rate": 7.357784431137726e-05, |
|
"loss": 0.5637, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.9489505754908598, |
|
"grad_norm": 1.0719372034072876, |
|
"learning_rate": 7.282934131736527e-05, |
|
"loss": 0.5685, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.9597833446174677, |
|
"grad_norm": 0.9777527451515198, |
|
"learning_rate": 7.20808383233533e-05, |
|
"loss": 0.5823, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.9706161137440759, |
|
"grad_norm": 1.019610047340393, |
|
"learning_rate": 7.133233532934132e-05, |
|
"loss": 0.5342, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.9814488828706838, |
|
"grad_norm": 1.2895872592926025, |
|
"learning_rate": 7.058383233532935e-05, |
|
"loss": 0.5625, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.992281651997292, |
|
"grad_norm": 1.1473089456558228, |
|
"learning_rate": 6.983532934131737e-05, |
|
"loss": 0.5476, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.0021665538253215, |
|
"grad_norm": 0.9660665392875671, |
|
"learning_rate": 6.908682634730538e-05, |
|
"loss": 0.5755, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 2.0129993229519294, |
|
"grad_norm": 1.2142918109893799, |
|
"learning_rate": 6.833832335329342e-05, |
|
"loss": 0.3712, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 2.0238320920785378, |
|
"grad_norm": 1.4106266498565674, |
|
"learning_rate": 6.758982035928145e-05, |
|
"loss": 0.3645, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 2.0346648612051457, |
|
"grad_norm": 1.2526434659957886, |
|
"learning_rate": 6.684131736526946e-05, |
|
"loss": 0.3634, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 2.0454976303317536, |
|
"grad_norm": 1.2345237731933594, |
|
"learning_rate": 6.609281437125749e-05, |
|
"loss": 0.3181, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 2.0563303994583615, |
|
"grad_norm": 1.1664937734603882, |
|
"learning_rate": 6.534431137724551e-05, |
|
"loss": 0.3412, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 2.0671631685849694, |
|
"grad_norm": 1.3861303329467773, |
|
"learning_rate": 6.459580838323354e-05, |
|
"loss": 0.348, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 2.0779959377115773, |
|
"grad_norm": 1.4952672719955444, |
|
"learning_rate": 6.384730538922156e-05, |
|
"loss": 0.3193, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 2.0888287068381857, |
|
"grad_norm": 1.3477551937103271, |
|
"learning_rate": 6.309880239520959e-05, |
|
"loss": 0.3499, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 2.0996614759647936, |
|
"grad_norm": 1.5403518676757812, |
|
"learning_rate": 6.235029940119761e-05, |
|
"loss": 0.3742, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 2.1104942450914015, |
|
"grad_norm": 1.2402806282043457, |
|
"learning_rate": 6.160179640718562e-05, |
|
"loss": 0.3575, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 2.1213270142180094, |
|
"grad_norm": 1.0961482524871826, |
|
"learning_rate": 6.085329341317365e-05, |
|
"loss": 0.355, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 2.1321597833446173, |
|
"grad_norm": 1.1491034030914307, |
|
"learning_rate": 6.010479041916168e-05, |
|
"loss": 0.3555, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 2.1429925524712257, |
|
"grad_norm": 1.6308982372283936, |
|
"learning_rate": 5.9356287425149706e-05, |
|
"loss": 0.3408, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 2.1538253215978336, |
|
"grad_norm": 1.481628656387329, |
|
"learning_rate": 5.8607784431137725e-05, |
|
"loss": 0.3814, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 2.1646580907244415, |
|
"grad_norm": 1.2989139556884766, |
|
"learning_rate": 5.785928143712576e-05, |
|
"loss": 0.343, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.1646580907244415, |
|
"eval_loss": 0.8451017141342163, |
|
"eval_runtime": 357.77, |
|
"eval_samples_per_second": 4.59, |
|
"eval_steps_per_second": 2.295, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.1754908598510494, |
|
"grad_norm": 1.3194793462753296, |
|
"learning_rate": 5.7110778443113775e-05, |
|
"loss": 0.3473, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 2.1863236289776573, |
|
"grad_norm": 1.0370373725891113, |
|
"learning_rate": 5.63622754491018e-05, |
|
"loss": 0.3167, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 2.197156398104265, |
|
"grad_norm": 1.1874184608459473, |
|
"learning_rate": 5.561377245508982e-05, |
|
"loss": 0.3533, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 2.2079891672308736, |
|
"grad_norm": 1.6191632747650146, |
|
"learning_rate": 5.486526946107785e-05, |
|
"loss": 0.3676, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 2.2188219363574815, |
|
"grad_norm": 1.5258554220199585, |
|
"learning_rate": 5.411676646706587e-05, |
|
"loss": 0.3684, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 2.2296547054840894, |
|
"grad_norm": 1.5065487623214722, |
|
"learning_rate": 5.3368263473053895e-05, |
|
"loss": 0.3634, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 2.2404874746106973, |
|
"grad_norm": 1.4091628789901733, |
|
"learning_rate": 5.261976047904193e-05, |
|
"loss": 0.3637, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 2.251320243737305, |
|
"grad_norm": 1.304001808166504, |
|
"learning_rate": 5.1871257485029946e-05, |
|
"loss": 0.3429, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 2.262153012863913, |
|
"grad_norm": 1.4180433750152588, |
|
"learning_rate": 5.1122754491017964e-05, |
|
"loss": 0.3317, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 2.2729857819905215, |
|
"grad_norm": 1.4093214273452759, |
|
"learning_rate": 5.037425149700599e-05, |
|
"loss": 0.3068, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 2.2838185511171294, |
|
"grad_norm": 1.3135852813720703, |
|
"learning_rate": 4.9625748502994015e-05, |
|
"loss": 0.3606, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 2.2946513202437373, |
|
"grad_norm": 1.180317759513855, |
|
"learning_rate": 4.887724550898204e-05, |
|
"loss": 0.347, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 2.305484089370345, |
|
"grad_norm": 1.3517941236495972, |
|
"learning_rate": 4.812874251497006e-05, |
|
"loss": 0.3263, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 2.316316858496953, |
|
"grad_norm": 1.4726624488830566, |
|
"learning_rate": 4.7380239520958084e-05, |
|
"loss": 0.369, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 2.3271496276235615, |
|
"grad_norm": 1.5008536577224731, |
|
"learning_rate": 4.663173652694611e-05, |
|
"loss": 0.373, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 2.3379823967501694, |
|
"grad_norm": 1.251826524734497, |
|
"learning_rate": 4.5883233532934135e-05, |
|
"loss": 0.3576, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 2.3488151658767773, |
|
"grad_norm": 1.3931865692138672, |
|
"learning_rate": 4.513473053892216e-05, |
|
"loss": 0.3544, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.359647935003385, |
|
"grad_norm": 1.2691062688827515, |
|
"learning_rate": 4.438622754491018e-05, |
|
"loss": 0.3802, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.370480704129993, |
|
"grad_norm": 1.1229090690612793, |
|
"learning_rate": 4.3637724550898204e-05, |
|
"loss": 0.3144, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.381313473256601, |
|
"grad_norm": 1.153255581855774, |
|
"learning_rate": 4.288922155688623e-05, |
|
"loss": 0.3531, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.3921462423832094, |
|
"grad_norm": 1.426155924797058, |
|
"learning_rate": 4.2140718562874255e-05, |
|
"loss": 0.359, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.4029790115098173, |
|
"grad_norm": 1.1010292768478394, |
|
"learning_rate": 4.139221556886227e-05, |
|
"loss": 0.3529, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.413811780636425, |
|
"grad_norm": 1.080626130104065, |
|
"learning_rate": 4.07185628742515e-05, |
|
"loss": 0.3312, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.424644549763033, |
|
"grad_norm": 1.2923741340637207, |
|
"learning_rate": 3.9970059880239525e-05, |
|
"loss": 0.3347, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.435477318889641, |
|
"grad_norm": 1.5063426494598389, |
|
"learning_rate": 3.9221556886227544e-05, |
|
"loss": 0.3455, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.446310088016249, |
|
"grad_norm": 1.608620285987854, |
|
"learning_rate": 3.847305389221557e-05, |
|
"loss": 0.3507, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.4571428571428573, |
|
"grad_norm": 1.3278478384017944, |
|
"learning_rate": 3.7724550898203595e-05, |
|
"loss": 0.3308, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.467975626269465, |
|
"grad_norm": 0.9832974672317505, |
|
"learning_rate": 3.697604790419162e-05, |
|
"loss": 0.3349, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.478808395396073, |
|
"grad_norm": 1.549773097038269, |
|
"learning_rate": 3.6227544910179645e-05, |
|
"loss": 0.3574, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.489641164522681, |
|
"grad_norm": 1.3476412296295166, |
|
"learning_rate": 3.5479041916167664e-05, |
|
"loss": 0.3226, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.500473933649289, |
|
"grad_norm": 1.3767112493515015, |
|
"learning_rate": 3.473053892215569e-05, |
|
"loss": 0.3651, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.5113067027758973, |
|
"grad_norm": 1.1789909601211548, |
|
"learning_rate": 3.3982035928143714e-05, |
|
"loss": 0.3132, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.522139471902505, |
|
"grad_norm": 1.262040376663208, |
|
"learning_rate": 3.323353293413174e-05, |
|
"loss": 0.3083, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.532972241029113, |
|
"grad_norm": 1.381622552871704, |
|
"learning_rate": 3.248502994011976e-05, |
|
"loss": 0.3666, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.543805010155721, |
|
"grad_norm": 1.4656392335891724, |
|
"learning_rate": 3.1736526946107784e-05, |
|
"loss": 0.3446, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.554637779282329, |
|
"grad_norm": 1.1189512014389038, |
|
"learning_rate": 3.098802395209581e-05, |
|
"loss": 0.3318, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.5654705484089373, |
|
"grad_norm": 1.595189094543457, |
|
"learning_rate": 3.0239520958083834e-05, |
|
"loss": 0.3481, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.5763033175355448, |
|
"grad_norm": 1.0735886096954346, |
|
"learning_rate": 2.949101796407186e-05, |
|
"loss": 0.3698, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.587136086662153, |
|
"grad_norm": 1.2776819467544556, |
|
"learning_rate": 2.874251497005988e-05, |
|
"loss": 0.3517, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.597968855788761, |
|
"grad_norm": 1.514204740524292, |
|
"learning_rate": 2.7994011976047907e-05, |
|
"loss": 0.3443, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.608801624915369, |
|
"grad_norm": 1.3794538974761963, |
|
"learning_rate": 2.724550898203593e-05, |
|
"loss": 0.3634, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.619634394041977, |
|
"grad_norm": 1.6485087871551514, |
|
"learning_rate": 2.6497005988023954e-05, |
|
"loss": 0.3606, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.6304671631685848, |
|
"grad_norm": 1.3901994228363037, |
|
"learning_rate": 2.5748502994011976e-05, |
|
"loss": 0.3398, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.641299932295193, |
|
"grad_norm": 1.384027123451233, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.3635, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.652132701421801, |
|
"grad_norm": 1.4652822017669678, |
|
"learning_rate": 2.4251497005988023e-05, |
|
"loss": 0.3739, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.662965470548409, |
|
"grad_norm": 1.3109517097473145, |
|
"learning_rate": 2.350299401197605e-05, |
|
"loss": 0.3424, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.673798239675017, |
|
"grad_norm": 1.518933892250061, |
|
"learning_rate": 2.275449101796407e-05, |
|
"loss": 0.3644, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.6846310088016248, |
|
"grad_norm": 1.1910316944122314, |
|
"learning_rate": 2.20059880239521e-05, |
|
"loss": 0.3641, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.695463777928233, |
|
"grad_norm": 1.5054243803024292, |
|
"learning_rate": 2.125748502994012e-05, |
|
"loss": 0.3235, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.706296547054841, |
|
"grad_norm": 1.3502476215362549, |
|
"learning_rate": 2.0508982035928146e-05, |
|
"loss": 0.3184, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.706296547054841, |
|
"eval_loss": 0.848840594291687, |
|
"eval_runtime": 357.9885, |
|
"eval_samples_per_second": 4.587, |
|
"eval_steps_per_second": 2.293, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.717129316181449, |
|
"grad_norm": 1.245007872581482, |
|
"learning_rate": 1.9760479041916168e-05, |
|
"loss": 0.3269, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.727962085308057, |
|
"grad_norm": 1.43673574924469, |
|
"learning_rate": 1.9011976047904194e-05, |
|
"loss": 0.3447, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.7387948544346647, |
|
"grad_norm": 1.5020848512649536, |
|
"learning_rate": 1.8263473053892215e-05, |
|
"loss": 0.3329, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.749627623561273, |
|
"grad_norm": 1.483389139175415, |
|
"learning_rate": 1.751497005988024e-05, |
|
"loss": 0.3438, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.7604603926878806, |
|
"grad_norm": 1.2835885286331177, |
|
"learning_rate": 1.6766467065868263e-05, |
|
"loss": 0.3259, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.771293161814489, |
|
"grad_norm": 1.3246042728424072, |
|
"learning_rate": 1.6017964071856288e-05, |
|
"loss": 0.3409, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.782125930941097, |
|
"grad_norm": 1.3244729042053223, |
|
"learning_rate": 1.5269461077844313e-05, |
|
"loss": 0.3044, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.7929587000677047, |
|
"grad_norm": 1.3871108293533325, |
|
"learning_rate": 1.4520958083832337e-05, |
|
"loss": 0.3518, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.8037914691943127, |
|
"grad_norm": 1.1578019857406616, |
|
"learning_rate": 1.377245508982036e-05, |
|
"loss": 0.3692, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.8146242383209206, |
|
"grad_norm": 1.3286668062210083, |
|
"learning_rate": 1.3023952095808384e-05, |
|
"loss": 0.3326, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.825457007447529, |
|
"grad_norm": 1.5447322130203247, |
|
"learning_rate": 1.2275449101796408e-05, |
|
"loss": 0.3555, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.836289776574137, |
|
"grad_norm": 1.2768478393554688, |
|
"learning_rate": 1.1526946107784433e-05, |
|
"loss": 0.3502, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.8471225457007447, |
|
"grad_norm": 1.2796357870101929, |
|
"learning_rate": 1.0778443113772455e-05, |
|
"loss": 0.3226, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.8579553148273527, |
|
"grad_norm": 1.370597243309021, |
|
"learning_rate": 1.0029940119760479e-05, |
|
"loss": 0.3377, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.8687880839539606, |
|
"grad_norm": 1.3650193214416504, |
|
"learning_rate": 9.281437125748502e-06, |
|
"loss": 0.3471, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.879620853080569, |
|
"grad_norm": 1.3109257221221924, |
|
"learning_rate": 8.532934131736528e-06, |
|
"loss": 0.3133, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.890453622207177, |
|
"grad_norm": 1.2204105854034424, |
|
"learning_rate": 7.784431137724551e-06, |
|
"loss": 0.319, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.9012863913337847, |
|
"grad_norm": 1.1295379400253296, |
|
"learning_rate": 7.035928143712575e-06, |
|
"loss": 0.3297, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.9121191604603927, |
|
"grad_norm": 1.130431890487671, |
|
"learning_rate": 6.2874251497005985e-06, |
|
"loss": 0.328, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.9229519295870006, |
|
"grad_norm": 1.2526017427444458, |
|
"learning_rate": 5.538922155688623e-06, |
|
"loss": 0.3287, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.933784698713609, |
|
"grad_norm": 1.4595975875854492, |
|
"learning_rate": 4.7904191616766475e-06, |
|
"loss": 0.3272, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.9446174678402164, |
|
"grad_norm": 1.2819081544876099, |
|
"learning_rate": 4.041916167664671e-06, |
|
"loss": 0.3624, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.9554502369668247, |
|
"grad_norm": 1.216588020324707, |
|
"learning_rate": 3.293413173652695e-06, |
|
"loss": 0.3416, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.9662830060934327, |
|
"grad_norm": 1.3824045658111572, |
|
"learning_rate": 2.5449101796407188e-06, |
|
"loss": 0.3654, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.9771157752200406, |
|
"grad_norm": 1.2501113414764404, |
|
"learning_rate": 1.7964071856287426e-06, |
|
"loss": 0.3443, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.9879485443466485, |
|
"grad_norm": 1.1143591403961182, |
|
"learning_rate": 1.0479041916167664e-06, |
|
"loss": 0.3604, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.9987813134732564, |
|
"grad_norm": 1.2795544862747192, |
|
"learning_rate": 2.9940119760479047e-07, |
|
"loss": 0.3251, |
|
"step": 2770 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 2772, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 9.746693219533455e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |