{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.1060723371308483,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.011060723371308484,
      "grad_norm": 538.04736328125,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 78.145,
      "step": 100
    },
    {
      "epoch": 0.022121446742616967,
      "grad_norm": 219.34432983398438,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 92.9586,
      "step": 200
    },
    {
      "epoch": 0.03318217011392545,
      "grad_norm": 726.5635986328125,
      "learning_rate": 3e-06,
      "loss": 53.8293,
      "step": 300
    },
    {
      "epoch": 0.044242893485233935,
      "grad_norm": 125.79649353027344,
      "learning_rate": 4.000000000000001e-06,
      "loss": 46.7007,
      "step": 400
    },
    {
      "epoch": 0.05530361685654242,
      "grad_norm": 1567.8216552734375,
      "learning_rate": 5e-06,
      "loss": 45.0472,
      "step": 500
    },
    {
      "epoch": 0.0663643402278509,
      "grad_norm": 1465.835205078125,
      "learning_rate": 6e-06,
      "loss": 77.118,
      "step": 600
    },
    {
      "epoch": 0.07742506359915939,
      "grad_norm": 879.969482421875,
      "learning_rate": 7e-06,
      "loss": 39.1988,
      "step": 700
    },
    {
      "epoch": 0.08848578697046787,
      "grad_norm": 271.9729309082031,
      "learning_rate": 8.000000000000001e-06,
      "loss": 79.6761,
      "step": 800
    },
    {
      "epoch": 0.09954651034177635,
      "grad_norm": 273.8168029785156,
      "learning_rate": 9e-06,
      "loss": 59.4896,
      "step": 900
    },
    {
      "epoch": 0.11060723371308484,
      "grad_norm": 226.001220703125,
      "learning_rate": 1e-05,
      "loss": 58.7963,
      "step": 1000
    },
    {
      "epoch": 0.12166795708439332,
      "grad_norm": 281.18731689453125,
      "learning_rate": 9.88888888888889e-06,
      "loss": 36.1047,
      "step": 1100
    },
    {
      "epoch": 0.1327286804557018,
      "grad_norm": 483.055908203125,
      "learning_rate": 9.777777777777779e-06,
      "loss": 47.9512,
      "step": 1200
    },
    {
      "epoch": 0.14378940382701028,
      "grad_norm": 308.240966796875,
      "learning_rate": 9.666666666666667e-06,
      "loss": 28.3554,
      "step": 1300
    },
    {
      "epoch": 0.15485012719831878,
      "grad_norm": 4638.63232421875,
      "learning_rate": 9.555555555555556e-06,
      "loss": 74.364,
      "step": 1400
    },
    {
      "epoch": 0.16591085056962726,
      "grad_norm": 287.8631896972656,
      "learning_rate": 9.444444444444445e-06,
      "loss": 66.8505,
      "step": 1500
    },
    {
      "epoch": 0.17697157394093574,
      "grad_norm": 63.01036071777344,
      "learning_rate": 9.333333333333334e-06,
      "loss": 66.0975,
      "step": 1600
    },
    {
      "epoch": 0.18803229731224422,
      "grad_norm": 794.6065063476562,
      "learning_rate": 9.222222222222224e-06,
      "loss": 39.9113,
      "step": 1700
    },
    {
      "epoch": 0.1990930206835527,
      "grad_norm": 597.668701171875,
      "learning_rate": 9.111111111111112e-06,
      "loss": 40.9694,
      "step": 1800
    },
    {
      "epoch": 0.21015374405486117,
      "grad_norm": 627.0382080078125,
      "learning_rate": 9e-06,
      "loss": 70.5725,
      "step": 1900
    },
    {
      "epoch": 0.22121446742616968,
      "grad_norm": 246.689697265625,
      "learning_rate": 8.888888888888888e-06,
      "loss": 63.1588,
      "step": 2000
    },
    {
      "epoch": 0.23227519079747816,
      "grad_norm": 109.21578979492188,
      "learning_rate": 8.777777777777778e-06,
      "loss": 46.5431,
      "step": 2100
    },
    {
      "epoch": 0.24333591416878664,
      "grad_norm": 740.5646362304688,
      "learning_rate": 8.666666666666668e-06,
      "loss": 43.8307,
      "step": 2200
    },
    {
      "epoch": 0.2543966375400951,
      "grad_norm": 295.5723876953125,
      "learning_rate": 8.555555555555556e-06,
      "loss": 80.948,
      "step": 2300
    },
    {
      "epoch": 0.2654573609114036,
      "grad_norm": 221.36172485351562,
      "learning_rate": 8.444444444444446e-06,
      "loss": 31.6573,
      "step": 2400
    },
    {
      "epoch": 0.2765180842827121,
      "grad_norm": 237.7281494140625,
      "learning_rate": 8.333333333333334e-06,
      "loss": 68.8744,
      "step": 2500
    },
    {
      "epoch": 0.28757880765402055,
      "grad_norm": 1603.667724609375,
      "learning_rate": 8.222222222222222e-06,
      "loss": 61.5703,
      "step": 2600
    },
    {
      "epoch": 0.29863953102532903,
      "grad_norm": 2168.47607421875,
      "learning_rate": 8.111111111111112e-06,
      "loss": 57.0335,
      "step": 2700
    },
    {
      "epoch": 0.30970025439663756,
      "grad_norm": 1646.4892578125,
      "learning_rate": 8.000000000000001e-06,
      "loss": 29.541,
      "step": 2800
    },
    {
      "epoch": 0.32076097776794604,
      "grad_norm": 410.48150634765625,
      "learning_rate": 7.88888888888889e-06,
      "loss": 58.4033,
      "step": 2900
    },
    {
      "epoch": 0.3318217011392545,
      "grad_norm": 457.2492370605469,
      "learning_rate": 7.77777777777778e-06,
      "loss": 77.6572,
      "step": 3000
    },
    {
      "epoch": 0.342882424510563,
      "grad_norm": 222.49844360351562,
      "learning_rate": 7.666666666666667e-06,
      "loss": 86.4192,
      "step": 3100
    },
    {
      "epoch": 0.3539431478818715,
      "grad_norm": 218.73208618164062,
      "learning_rate": 7.555555555555556e-06,
      "loss": 28.7311,
      "step": 3200
    },
    {
      "epoch": 0.36500387125317996,
      "grad_norm": 514.8499755859375,
      "learning_rate": 7.444444444444445e-06,
      "loss": 61.9305,
      "step": 3300
    },
    {
      "epoch": 0.37606459462448844,
      "grad_norm": 471.7864074707031,
      "learning_rate": 7.333333333333333e-06,
      "loss": 52.1054,
      "step": 3400
    },
    {
      "epoch": 0.3871253179957969,
      "grad_norm": 26.25701904296875,
      "learning_rate": 7.222222222222223e-06,
      "loss": 41.1312,
      "step": 3500
    },
    {
      "epoch": 0.3981860413671054,
      "grad_norm": 158.19760131835938,
      "learning_rate": 7.111111111111112e-06,
      "loss": 31.3393,
      "step": 3600
    },
    {
      "epoch": 0.40924676473841387,
      "grad_norm": 426.075927734375,
      "learning_rate": 7e-06,
      "loss": 53.1371,
      "step": 3700
    },
    {
      "epoch": 0.42030748810972235,
      "grad_norm": 235.6551513671875,
      "learning_rate": 6.88888888888889e-06,
      "loss": 74.3628,
      "step": 3800
    },
    {
      "epoch": 0.4313682114810309,
      "grad_norm": 847.091796875,
      "learning_rate": 6.777777777777779e-06,
      "loss": 66.8835,
      "step": 3900
    },
    {
      "epoch": 0.44242893485233936,
      "grad_norm": 462.72064208984375,
      "learning_rate": 6.666666666666667e-06,
      "loss": 21.3985,
      "step": 4000
    },
    {
      "epoch": 0.45348965822364784,
      "grad_norm": 84.74278259277344,
      "learning_rate": 6.555555555555556e-06,
      "loss": 46.6882,
      "step": 4100
    },
    {
      "epoch": 0.4645503815949563,
      "grad_norm": 307.1804504394531,
      "learning_rate": 6.444444444444445e-06,
      "loss": 40.7358,
      "step": 4200
    },
    {
      "epoch": 0.4756111049662648,
      "grad_norm": 135.82408142089844,
      "learning_rate": 6.333333333333333e-06,
      "loss": 34.787,
      "step": 4300
    },
    {
      "epoch": 0.4866718283375733,
      "grad_norm": 349.708984375,
      "learning_rate": 6.222222222222223e-06,
      "loss": 65.6245,
      "step": 4400
    },
    {
      "epoch": 0.49773255170888175,
      "grad_norm": 226.45968627929688,
      "learning_rate": 6.111111111111112e-06,
      "loss": 41.7902,
      "step": 4500
    },
    {
      "epoch": 0.5087932750801902,
      "grad_norm": 626.4755859375,
      "learning_rate": 6e-06,
      "loss": 41.5158,
      "step": 4600
    },
    {
      "epoch": 0.5198539984514987,
      "grad_norm": 1342.0029296875,
      "learning_rate": 5.88888888888889e-06,
      "loss": 36.3761,
      "step": 4700
    },
    {
      "epoch": 0.5309147218228072,
      "grad_norm": 246.92034912109375,
      "learning_rate": 5.777777777777778e-06,
      "loss": 65.2225,
      "step": 4800
    },
    {
      "epoch": 0.5419754451941157,
      "grad_norm": 935.7799682617188,
      "learning_rate": 5.666666666666667e-06,
      "loss": 28.8935,
      "step": 4900
    },
    {
      "epoch": 0.5530361685654241,
      "grad_norm": 96.46562957763672,
      "learning_rate": 5.555555555555557e-06,
      "loss": 23.2736,
      "step": 5000
    },
    {
      "epoch": 0.5640968919367326,
      "grad_norm": 331.4699401855469,
      "learning_rate": 5.444444444444445e-06,
      "loss": 37.549,
      "step": 5100
    },
    {
      "epoch": 0.5751576153080411,
      "grad_norm": 828.3701782226562,
      "learning_rate": 5.333333333333334e-06,
      "loss": 39.8081,
      "step": 5200
    },
    {
      "epoch": 0.5862183386793496,
      "grad_norm": 493.2457275390625,
      "learning_rate": 5.2222222222222226e-06,
      "loss": 71.5602,
      "step": 5300
    },
    {
      "epoch": 0.5972790620506581,
      "grad_norm": 142.9763946533203,
      "learning_rate": 5.1111111111111115e-06,
      "loss": 35.0131,
      "step": 5400
    },
    {
      "epoch": 0.6083397854219667,
      "grad_norm": 2258.4990234375,
      "learning_rate": 5e-06,
      "loss": 53.0131,
      "step": 5500
    },
    {
      "epoch": 0.6194005087932751,
      "grad_norm": 452.21826171875,
      "learning_rate": 4.888888888888889e-06,
      "loss": 52.5697,
      "step": 5600
    },
    {
      "epoch": 0.6304612321645836,
      "grad_norm": 285.1300048828125,
      "learning_rate": 4.777777777777778e-06,
      "loss": 27.8757,
      "step": 5700
    },
    {
      "epoch": 0.6415219555358921,
      "grad_norm": 531.2687377929688,
      "learning_rate": 4.666666666666667e-06,
      "loss": 50.0262,
      "step": 5800
    },
    {
      "epoch": 0.6525826789072006,
      "grad_norm": 634.396240234375,
      "learning_rate": 4.555555555555556e-06,
      "loss": 26.5551,
      "step": 5900
    },
    {
      "epoch": 0.663643402278509,
      "grad_norm": 202.63027954101562,
      "learning_rate": 4.444444444444444e-06,
      "loss": 47.3534,
      "step": 6000
    },
    {
      "epoch": 0.6747041256498175,
      "grad_norm": 135.98355102539062,
      "learning_rate": 4.333333333333334e-06,
      "loss": 56.0106,
      "step": 6100
    },
    {
      "epoch": 0.685764849021126,
      "grad_norm": 172.3655242919922,
      "learning_rate": 4.222222222222223e-06,
      "loss": 87.5618,
      "step": 6200
    },
    {
      "epoch": 0.6968255723924345,
      "grad_norm": 312.11932373046875,
      "learning_rate": 4.111111111111111e-06,
      "loss": 38.6178,
      "step": 6300
    },
    {
      "epoch": 0.707886295763743,
      "grad_norm": 313.05389404296875,
      "learning_rate": 4.000000000000001e-06,
      "loss": 67.4866,
      "step": 6400
    },
    {
      "epoch": 0.7189470191350514,
      "grad_norm": 220.42941284179688,
      "learning_rate": 3.88888888888889e-06,
      "loss": 51.0776,
      "step": 6500
    },
    {
      "epoch": 0.7300077425063599,
      "grad_norm": 255.8204803466797,
      "learning_rate": 3.777777777777778e-06,
      "loss": 46.7792,
      "step": 6600
    },
    {
      "epoch": 0.7410684658776684,
      "grad_norm": 303.4053649902344,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 43.976,
      "step": 6700
    },
    {
      "epoch": 0.7521291892489769,
      "grad_norm": 65.36351776123047,
      "learning_rate": 3.555555555555556e-06,
      "loss": 32.331,
      "step": 6800
    },
    {
      "epoch": 0.7631899126202853,
      "grad_norm": 226.184814453125,
      "learning_rate": 3.444444444444445e-06,
      "loss": 39.1361,
      "step": 6900
    },
    {
      "epoch": 0.7742506359915938,
      "grad_norm": 242.291015625,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 23.3194,
      "step": 7000
    },
    {
      "epoch": 0.7853113593629023,
      "grad_norm": 390.12750244140625,
      "learning_rate": 3.2222222222222227e-06,
      "loss": 39.4668,
      "step": 7100
    },
    {
      "epoch": 0.7963720827342108,
      "grad_norm": 365.88909912109375,
      "learning_rate": 3.1111111111111116e-06,
      "loss": 47.7755,
      "step": 7200
    },
    {
      "epoch": 0.8074328061055193,
      "grad_norm": 1790.1258544921875,
      "learning_rate": 3e-06,
      "loss": 45.9991,
      "step": 7300
    },
    {
      "epoch": 0.8184935294768277,
      "grad_norm": 199.19326782226562,
      "learning_rate": 2.888888888888889e-06,
      "loss": 29.6907,
      "step": 7400
    },
    {
      "epoch": 0.8295542528481362,
      "grad_norm": 399.6342468261719,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 42.3841,
      "step": 7500
    },
    {
      "epoch": 0.8406149762194447,
      "grad_norm": 331.04315185546875,
      "learning_rate": 2.666666666666667e-06,
      "loss": 65.6187,
      "step": 7600
    },
    {
      "epoch": 0.8516756995907533,
      "grad_norm": 2146.183349609375,
      "learning_rate": 2.5555555555555557e-06,
      "loss": 43.9393,
      "step": 7700
    },
    {
      "epoch": 0.8627364229620618,
      "grad_norm": 204.7069549560547,
      "learning_rate": 2.4444444444444447e-06,
      "loss": 41.5145,
      "step": 7800
    },
    {
      "epoch": 0.8737971463333702,
      "grad_norm": 118.2260971069336,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 23.4009,
      "step": 7900
    },
    {
      "epoch": 0.8848578697046787,
      "grad_norm": 808.902587890625,
      "learning_rate": 2.222222222222222e-06,
      "loss": 28.5469,
      "step": 8000
    },
    {
      "epoch": 0.8959185930759872,
      "grad_norm": 514.1236572265625,
      "learning_rate": 2.1111111111111114e-06,
      "loss": 32.4101,
      "step": 8100
    },
    {
      "epoch": 0.9069793164472957,
      "grad_norm": 256.0317077636719,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 49.2802,
      "step": 8200
    },
    {
      "epoch": 0.9180400398186042,
      "grad_norm": 248.129150390625,
      "learning_rate": 1.888888888888889e-06,
      "loss": 40.7159,
      "step": 8300
    },
    {
      "epoch": 0.9291007631899126,
      "grad_norm": 207.14627075195312,
      "learning_rate": 1.777777777777778e-06,
      "loss": 36.8628,
      "step": 8400
    },
    {
      "epoch": 0.9401614865612211,
      "grad_norm": 492.2493591308594,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 50.4246,
      "step": 8500
    },
    {
      "epoch": 0.9512222099325296,
      "grad_norm": 237.53575134277344,
      "learning_rate": 1.5555555555555558e-06,
      "loss": 45.3869,
      "step": 8600
    },
    {
      "epoch": 0.9622829333038381,
      "grad_norm": 446.43524169921875,
      "learning_rate": 1.4444444444444445e-06,
      "loss": 54.2354,
      "step": 8700
    },
    {
      "epoch": 0.9733436566751466,
      "grad_norm": 301.3879699707031,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 53.4569,
      "step": 8800
    },
    {
      "epoch": 0.984404380046455,
      "grad_norm": 378.23992919921875,
      "learning_rate": 1.2222222222222223e-06,
      "loss": 74.1828,
      "step": 8900
    },
    {
      "epoch": 0.9954651034177635,
      "grad_norm": 244.3396453857422,
      "learning_rate": 1.111111111111111e-06,
      "loss": 38.1764,
      "step": 9000
    },
    {
      "epoch": 1.0,
      "eval_loss": 91.81082153320312,
      "eval_runtime": 31.8798,
      "eval_samples_per_second": 63.049,
      "eval_steps_per_second": 31.525,
      "step": 9041
    },
    {
      "epoch": 1.006525826789072,
      "grad_norm": 69.61438751220703,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 39.4874,
      "step": 9100
    },
    {
      "epoch": 1.0175865501603805,
      "grad_norm": 204.00283813476562,
      "learning_rate": 8.88888888888889e-07,
      "loss": 30.213,
      "step": 9200
    },
    {
      "epoch": 1.028647273531689,
      "grad_norm": 1114.0423583984375,
      "learning_rate": 7.777777777777779e-07,
      "loss": 19.251,
      "step": 9300
    },
    {
      "epoch": 1.0397079969029974,
      "grad_norm": 592.7640380859375,
      "learning_rate": 6.666666666666667e-07,
      "loss": 31.6648,
      "step": 9400
    },
    {
      "epoch": 1.050768720274306,
      "grad_norm": 258.1551818847656,
      "learning_rate": 5.555555555555555e-07,
      "loss": 55.9748,
      "step": 9500
    },
    {
      "epoch": 1.0618294436456144,
      "grad_norm": 12517.9658203125,
      "learning_rate": 4.444444444444445e-07,
      "loss": 38.2583,
      "step": 9600
    },
    {
      "epoch": 1.0728901670169229,
      "grad_norm": 101.80846405029297,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 34.3433,
      "step": 9700
    },
    {
      "epoch": 1.0839508903882313,
      "grad_norm": 529.3522338867188,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 35.9397,
      "step": 9800
    },
    {
      "epoch": 1.0950116137595398,
      "grad_norm": 64.61666870117188,
      "learning_rate": 1.1111111111111112e-07,
      "loss": 20.6341,
      "step": 9900
    },
    {
      "epoch": 1.1060723371308483,
      "grad_norm": 143.1585235595703,
      "learning_rate": 0.0,
      "loss": 34.9678,
      "step": 10000
    }
  ],
  "logging_steps": 100,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 5000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 0.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}