{ |
|
"best_metric": 0.530606210231781, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-300", |
|
"epoch": 0.068575347162695, |
|
"eval_steps": 50, |
|
"global_step": 300, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0002285844905423167, |
|
"grad_norm": 12.226268768310547, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 3.9387, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0002285844905423167, |
|
"eval_loss": 4.918857097625732, |
|
"eval_runtime": 1326.574, |
|
"eval_samples_per_second": 5.554, |
|
"eval_steps_per_second": 2.777, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0004571689810846334, |
|
"grad_norm": 16.508821487426758, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 4.288, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0006857534716269502, |
|
"grad_norm": 19.46694564819336, |
|
"learning_rate": 1e-05, |
|
"loss": 4.2996, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0009143379621692668, |
|
"grad_norm": 16.385190963745117, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 3.5999, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0011429224527115835, |
|
"grad_norm": 19.315223693847656, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 3.9415, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0013715069432539003, |
|
"grad_norm": 20.83527946472168, |
|
"learning_rate": 2e-05, |
|
"loss": 3.7644, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.001600091433796217, |
|
"grad_norm": 50.4894905090332, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 5.543, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0018286759243385335, |
|
"grad_norm": 109.4534912109375, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 9.1326, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.00205726041488085, |
|
"grad_norm": 209.9546356201172, |
|
"learning_rate": 3e-05, |
|
"loss": 11.0199, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.002285844905423167, |
|
"grad_norm": 126.74362182617188, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 6.5991, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.002514429395965484, |
|
"grad_norm": 62.07261657714844, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 4.5192, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0027430138865078006, |
|
"grad_norm": 57.047672271728516, |
|
"learning_rate": 4e-05, |
|
"loss": 2.792, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.002971598377050117, |
|
"grad_norm": 40.239585876464844, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 1.6243, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.003200182867592434, |
|
"grad_norm": 147.44810485839844, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 7.7768, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0034287673581347507, |
|
"grad_norm": 129.61868286132812, |
|
"learning_rate": 5e-05, |
|
"loss": 3.3601, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.003657351848677067, |
|
"grad_norm": 67.8729476928711, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 2.3788, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.003885936339219384, |
|
"grad_norm": 53.81801986694336, |
|
"learning_rate": 5.666666666666667e-05, |
|
"loss": 1.5561, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.0041145208297617, |
|
"grad_norm": 108.63700866699219, |
|
"learning_rate": 6e-05, |
|
"loss": 3.4077, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.004343105320304018, |
|
"grad_norm": 92.20858764648438, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 5.118, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.004571689810846334, |
|
"grad_norm": 107.2628402709961, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 4.0284, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.00480027430138865, |
|
"grad_norm": 84.7047348022461, |
|
"learning_rate": 7e-05, |
|
"loss": 2.3014, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.005028858791930968, |
|
"grad_norm": 80.38473510742188, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 2.9712, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.005257443282473284, |
|
"grad_norm": 58.196502685546875, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 3.1879, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.005486027773015601, |
|
"grad_norm": 217.900146484375, |
|
"learning_rate": 8e-05, |
|
"loss": 10.9929, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.005714612263557918, |
|
"grad_norm": 137.6026153564453, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 5.2346, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.005943196754100234, |
|
"grad_norm": 67.36244201660156, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 1.8588, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.006171781244642551, |
|
"grad_norm": 53.583919525146484, |
|
"learning_rate": 9e-05, |
|
"loss": 1.208, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.006400365735184868, |
|
"grad_norm": 247.46914672851562, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 23.0121, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.006628950225727184, |
|
"grad_norm": 309.0799255371094, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 11.6811, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.006857534716269501, |
|
"grad_norm": 88.57025146484375, |
|
"learning_rate": 0.0001, |
|
"loss": 6.6625, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.007086119206811818, |
|
"grad_norm": 37.52093505859375, |
|
"learning_rate": 9.999661540018812e-05, |
|
"loss": 5.0423, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.007314703697354134, |
|
"grad_norm": 59.48794937133789, |
|
"learning_rate": 9.998646205897309e-05, |
|
"loss": 4.7137, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.007543288187896451, |
|
"grad_norm": 31.38121223449707, |
|
"learning_rate": 9.99695413509548e-05, |
|
"loss": 2.6695, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.007771872678438768, |
|
"grad_norm": 45.99576950073242, |
|
"learning_rate": 9.994585556692624e-05, |
|
"loss": 3.0682, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.008000457168981085, |
|
"grad_norm": 26.696224212646484, |
|
"learning_rate": 9.991540791356342e-05, |
|
"loss": 3.2612, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0082290416595234, |
|
"grad_norm": 46.0620002746582, |
|
"learning_rate": 9.987820251299122e-05, |
|
"loss": 3.0265, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.008457626150065718, |
|
"grad_norm": 28.95342254638672, |
|
"learning_rate": 9.983424440222531e-05, |
|
"loss": 3.1935, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.008686210640608035, |
|
"grad_norm": 20.682689666748047, |
|
"learning_rate": 9.978353953249022e-05, |
|
"loss": 2.1278, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.00891479513115035, |
|
"grad_norm": 95.37089538574219, |
|
"learning_rate": 9.972609476841367e-05, |
|
"loss": 4.2482, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.009143379621692668, |
|
"grad_norm": 36.080413818359375, |
|
"learning_rate": 9.966191788709716e-05, |
|
"loss": 3.0929, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.009371964112234985, |
|
"grad_norm": 42.082088470458984, |
|
"learning_rate": 9.959101757706308e-05, |
|
"loss": 2.736, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.0096005486027773, |
|
"grad_norm": 22.620975494384766, |
|
"learning_rate": 9.951340343707852e-05, |
|
"loss": 3.1542, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.009829133093319618, |
|
"grad_norm": 31.406814575195312, |
|
"learning_rate": 9.942908597485558e-05, |
|
"loss": 3.6269, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.010057717583861935, |
|
"grad_norm": 25.586286544799805, |
|
"learning_rate": 9.933807660562898e-05, |
|
"loss": 6.2693, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.010286302074404252, |
|
"grad_norm": 29.92267608642578, |
|
"learning_rate": 9.924038765061042e-05, |
|
"loss": 7.0683, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.010514886564946568, |
|
"grad_norm": 20.818403244018555, |
|
"learning_rate": 9.913603233532067e-05, |
|
"loss": 6.0885, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.010743471055488885, |
|
"grad_norm": 20.533842086791992, |
|
"learning_rate": 9.902502478779896e-05, |
|
"loss": 5.4988, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.010972055546031203, |
|
"grad_norm": 16.80043601989746, |
|
"learning_rate": 9.890738003669029e-05, |
|
"loss": 4.4107, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.011200640036573518, |
|
"grad_norm": 13.855122566223145, |
|
"learning_rate": 9.878311400921072e-05, |
|
"loss": 4.2749, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.011429224527115835, |
|
"grad_norm": 21.536596298217773, |
|
"learning_rate": 9.865224352899119e-05, |
|
"loss": 5.1643, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.011429224527115835, |
|
"eval_loss": 2.806919813156128, |
|
"eval_runtime": 1332.9908, |
|
"eval_samples_per_second": 5.527, |
|
"eval_steps_per_second": 2.764, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.011657809017658153, |
|
"grad_norm": 15.51828670501709, |
|
"learning_rate": 9.851478631379982e-05, |
|
"loss": 4.3807, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.011886393508200468, |
|
"grad_norm": 11.271062850952148, |
|
"learning_rate": 9.837076097314319e-05, |
|
"loss": 4.1682, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.012114977998742785, |
|
"grad_norm": 8.433566093444824, |
|
"learning_rate": 9.822018700574695e-05, |
|
"loss": 3.3631, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.012343562489285103, |
|
"grad_norm": 8.091526985168457, |
|
"learning_rate": 9.806308479691595e-05, |
|
"loss": 2.9425, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.012572146979827418, |
|
"grad_norm": 8.342974662780762, |
|
"learning_rate": 9.789947561577445e-05, |
|
"loss": 2.9333, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.012800731470369735, |
|
"grad_norm": 17.649492263793945, |
|
"learning_rate": 9.77293816123866e-05, |
|
"loss": 3.3319, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.013029315960912053, |
|
"grad_norm": 28.43054962158203, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 3.0355, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.013257900451454368, |
|
"grad_norm": 28.488826751708984, |
|
"learning_rate": 9.736983212571646e-05, |
|
"loss": 2.6905, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.013486484941996685, |
|
"grad_norm": 68.5689926147461, |
|
"learning_rate": 9.718042531967918e-05, |
|
"loss": 2.8557, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.013715069432539003, |
|
"grad_norm": 7.864617347717285, |
|
"learning_rate": 9.698463103929542e-05, |
|
"loss": 0.1518, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.013943653923081318, |
|
"grad_norm": 4.028836727142334, |
|
"learning_rate": 9.678247579197657e-05, |
|
"loss": 0.0393, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.014172238413623636, |
|
"grad_norm": 78.4482192993164, |
|
"learning_rate": 9.657398694630712e-05, |
|
"loss": 0.2168, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.014400822904165953, |
|
"grad_norm": 212.4618682861328, |
|
"learning_rate": 9.635919272833938e-05, |
|
"loss": 3.4277, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.014629407394708268, |
|
"grad_norm": 142.9414825439453, |
|
"learning_rate": 9.613812221777212e-05, |
|
"loss": 11.2092, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.014857991885250586, |
|
"grad_norm": 106.03768157958984, |
|
"learning_rate": 9.591080534401371e-05, |
|
"loss": 3.2856, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.015086576375792903, |
|
"grad_norm": 130.8965606689453, |
|
"learning_rate": 9.567727288213005e-05, |
|
"loss": 3.508, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.015315160866335218, |
|
"grad_norm": 86.60301208496094, |
|
"learning_rate": 9.543755644867822e-05, |
|
"loss": 2.7746, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.015543745356877536, |
|
"grad_norm": 62.65277862548828, |
|
"learning_rate": 9.519168849742604e-05, |
|
"loss": 1.4813, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.01577232984741985, |
|
"grad_norm": 44.15681076049805, |
|
"learning_rate": 9.493970231495835e-05, |
|
"loss": 3.2382, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.01600091433796217, |
|
"grad_norm": 54.2675666809082, |
|
"learning_rate": 9.468163201617062e-05, |
|
"loss": 3.0064, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.016229498828504486, |
|
"grad_norm": 20.503276824951172, |
|
"learning_rate": 9.441751253965021e-05, |
|
"loss": 1.6639, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.0164580833190468, |
|
"grad_norm": 42.72779846191406, |
|
"learning_rate": 9.414737964294636e-05, |
|
"loss": 1.9632, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.01668666780958912, |
|
"grad_norm": 94.91614532470703, |
|
"learning_rate": 9.38712698977291e-05, |
|
"loss": 7.9977, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.016915252300131436, |
|
"grad_norm": 120.97164154052734, |
|
"learning_rate": 9.358922068483812e-05, |
|
"loss": 6.0946, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.01714383679067375, |
|
"grad_norm": 65.04187774658203, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 2.1872, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.01737242128121607, |
|
"grad_norm": 102.3792495727539, |
|
"learning_rate": 9.300745739476829e-05, |
|
"loss": 1.955, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.017601005771758386, |
|
"grad_norm": 78.6114501953125, |
|
"learning_rate": 9.270782207902629e-05, |
|
"loss": 1.5882, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.0178295902623007, |
|
"grad_norm": 112.37207794189453, |
|
"learning_rate": 9.24024048078213e-05, |
|
"loss": 9.1085, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.01805817475284302, |
|
"grad_norm": 123.03736114501953, |
|
"learning_rate": 9.209124692976287e-05, |
|
"loss": 12.134, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.018286759243385336, |
|
"grad_norm": 64.75898742675781, |
|
"learning_rate": 9.177439057064683e-05, |
|
"loss": 8.9277, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.01851534373392765, |
|
"grad_norm": 35.20903015136719, |
|
"learning_rate": 9.145187862775209e-05, |
|
"loss": 5.1302, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.01874392822446997, |
|
"grad_norm": 62.34042739868164, |
|
"learning_rate": 9.112375476403312e-05, |
|
"loss": 5.2516, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.018972512715012286, |
|
"grad_norm": 45.33003616333008, |
|
"learning_rate": 9.079006340220862e-05, |
|
"loss": 4.3035, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.0192010972055546, |
|
"grad_norm": 71.29220581054688, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 4.2392, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.01942968169609692, |
|
"grad_norm": 19.70421600341797, |
|
"learning_rate": 9.01061596377522e-05, |
|
"loss": 3.9666, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.019658266186639236, |
|
"grad_norm": 14.824080467224121, |
|
"learning_rate": 8.97560398247424e-05, |
|
"loss": 2.4165, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.019886850677181555, |
|
"grad_norm": 14.691969871520996, |
|
"learning_rate": 8.940053768033609e-05, |
|
"loss": 2.522, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.02011543516772387, |
|
"grad_norm": 20.41330337524414, |
|
"learning_rate": 8.903970133383297e-05, |
|
"loss": 2.6812, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.020344019658266186, |
|
"grad_norm": 16.38035774230957, |
|
"learning_rate": 8.86735796366982e-05, |
|
"loss": 1.7804, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.020572604148808505, |
|
"grad_norm": 14.719985008239746, |
|
"learning_rate": 8.83022221559489e-05, |
|
"loss": 2.0308, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.02080118863935082, |
|
"grad_norm": 21.014904022216797, |
|
"learning_rate": 8.792567916744346e-05, |
|
"loss": 2.2666, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.021029773129893136, |
|
"grad_norm": 27.58352279663086, |
|
"learning_rate": 8.754400164907497e-05, |
|
"loss": 3.785, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.021258357620435455, |
|
"grad_norm": 24.515430450439453, |
|
"learning_rate": 8.715724127386972e-05, |
|
"loss": 5.0201, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.02148694211097777, |
|
"grad_norm": 28.423784255981445, |
|
"learning_rate": 8.676545040299145e-05, |
|
"loss": 6.3986, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.021715526601520086, |
|
"grad_norm": 19.08306312561035, |
|
"learning_rate": 8.636868207865244e-05, |
|
"loss": 6.0112, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.021944111092062405, |
|
"grad_norm": 18.091100692749023, |
|
"learning_rate": 8.596699001693255e-05, |
|
"loss": 4.6697, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.02217269558260472, |
|
"grad_norm": 16.675926208496094, |
|
"learning_rate": 8.556042860050687e-05, |
|
"loss": 5.068, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.022401280073147036, |
|
"grad_norm": 13.843223571777344, |
|
"learning_rate": 8.51490528712831e-05, |
|
"loss": 4.7174, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.022629864563689355, |
|
"grad_norm": 11.593822479248047, |
|
"learning_rate": 8.473291852294987e-05, |
|
"loss": 3.1727, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.02285844905423167, |
|
"grad_norm": 14.757932662963867, |
|
"learning_rate": 8.43120818934367e-05, |
|
"loss": 4.6322, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.02285844905423167, |
|
"eval_loss": 2.1855103969573975, |
|
"eval_runtime": 1333.7195, |
|
"eval_samples_per_second": 5.524, |
|
"eval_steps_per_second": 2.762, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.023087033544773986, |
|
"grad_norm": 14.086247444152832, |
|
"learning_rate": 8.388659995728663e-05, |
|
"loss": 4.5384, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.023315618035316305, |
|
"grad_norm": 10.055537223815918, |
|
"learning_rate": 8.345653031794292e-05, |
|
"loss": 4.1292, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.02354420252585862, |
|
"grad_norm": 7.018960952758789, |
|
"learning_rate": 8.302193119995039e-05, |
|
"loss": 3.673, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.023772787016400936, |
|
"grad_norm": 7.652724266052246, |
|
"learning_rate": 8.258286144107276e-05, |
|
"loss": 3.1104, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.024001371506943255, |
|
"grad_norm": 9.536213874816895, |
|
"learning_rate": 8.213938048432697e-05, |
|
"loss": 2.8206, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.02422995599748557, |
|
"grad_norm": 9.198083877563477, |
|
"learning_rate": 8.169154836993551e-05, |
|
"loss": 2.8642, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.024458540488027886, |
|
"grad_norm": 14.823854446411133, |
|
"learning_rate": 8.1239425727198e-05, |
|
"loss": 2.7631, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.024687124978570205, |
|
"grad_norm": 44.76399612426758, |
|
"learning_rate": 8.07830737662829e-05, |
|
"loss": 2.9486, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.02491570946911252, |
|
"grad_norm": 29.669414520263672, |
|
"learning_rate": 8.032255426994069e-05, |
|
"loss": 2.7715, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.025144293959654836, |
|
"grad_norm": 60.78176498413086, |
|
"learning_rate": 7.985792958513931e-05, |
|
"loss": 2.6962, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.025372878450197155, |
|
"grad_norm": 17.208572387695312, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.353, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.02560146294073947, |
|
"grad_norm": 3.3832650184631348, |
|
"learning_rate": 7.891661680839932e-05, |
|
"loss": 0.0653, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.025830047431281786, |
|
"grad_norm": 7.614874839782715, |
|
"learning_rate": 7.844005615514259e-05, |
|
"loss": 0.0529, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.026058631921824105, |
|
"grad_norm": 56.453670501708984, |
|
"learning_rate": 7.795964517353735e-05, |
|
"loss": 3.9549, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.02628721641236642, |
|
"grad_norm": 91.68329620361328, |
|
"learning_rate": 7.74754489035403e-05, |
|
"loss": 4.0682, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.026515800902908736, |
|
"grad_norm": 57.930580139160156, |
|
"learning_rate": 7.698753289757565e-05, |
|
"loss": 1.4162, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.026744385393451055, |
|
"grad_norm": 112.63847351074219, |
|
"learning_rate": 7.649596321166024e-05, |
|
"loss": 1.2008, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.02697296988399337, |
|
"grad_norm": 149.06593322753906, |
|
"learning_rate": 7.600080639646077e-05, |
|
"loss": 1.9499, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.027201554374535687, |
|
"grad_norm": 399.54156494140625, |
|
"learning_rate": 7.550212948828377e-05, |
|
"loss": 2.0733, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.027430138865078006, |
|
"grad_norm": 148.1618194580078, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 4.0831, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.02765872335562032, |
|
"grad_norm": 76.7452621459961, |
|
"learning_rate": 7.449448591190435e-05, |
|
"loss": 3.1251, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.027887307846162637, |
|
"grad_norm": 71.67847442626953, |
|
"learning_rate": 7.398565566251232e-05, |
|
"loss": 2.4827, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.028115892336704956, |
|
"grad_norm": 23.501968383789062, |
|
"learning_rate": 7.347357813929454e-05, |
|
"loss": 1.5631, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.02834447682724727, |
|
"grad_norm": 136.9478302001953, |
|
"learning_rate": 7.295832266935059e-05, |
|
"loss": 7.535, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.028573061317789587, |
|
"grad_norm": 91.296142578125, |
|
"learning_rate": 7.243995901002312e-05, |
|
"loss": 3.9653, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.028801645808331906, |
|
"grad_norm": 48.54848861694336, |
|
"learning_rate": 7.191855733945387e-05, |
|
"loss": 2.1426, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.02903023029887422, |
|
"grad_norm": 202.33116149902344, |
|
"learning_rate": 7.139418824708272e-05, |
|
"loss": 2.8557, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.029258814789416537, |
|
"grad_norm": 1095.19873046875, |
|
"learning_rate": 7.08669227240909e-05, |
|
"loss": 4.8724, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.029487399279958856, |
|
"grad_norm": 1942.80859375, |
|
"learning_rate": 7.033683215379002e-05, |
|
"loss": 20.7307, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.02971598377050117, |
|
"grad_norm": 439.0259094238281, |
|
"learning_rate": 6.980398830195785e-05, |
|
"loss": 15.7319, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.029944568261043487, |
|
"grad_norm": 145.61013793945312, |
|
"learning_rate": 6.926846330712242e-05, |
|
"loss": 9.0764, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.030173152751585806, |
|
"grad_norm": 65.11675262451172, |
|
"learning_rate": 6.873032967079561e-05, |
|
"loss": 7.8135, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.03040173724212812, |
|
"grad_norm": 371.9282531738281, |
|
"learning_rate": 6.818966024765758e-05, |
|
"loss": 7.8735, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.030630321732670437, |
|
"grad_norm": 113.37095642089844, |
|
"learning_rate": 6.764652823569344e-05, |
|
"loss": 5.4435, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.030858906223212756, |
|
"grad_norm": 152.44851684570312, |
|
"learning_rate": 6.710100716628344e-05, |
|
"loss": 4.9697, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.03108749071375507, |
|
"grad_norm": 29.180082321166992, |
|
"learning_rate": 6.65531708942479e-05, |
|
"loss": 3.2838, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.03131607520429739, |
|
"grad_norm": 26.18000030517578, |
|
"learning_rate": 6.600309358784857e-05, |
|
"loss": 3.4387, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.0315446596948397, |
|
"grad_norm": 39.24956512451172, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 3.3899, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.03177324418538202, |
|
"grad_norm": 27.828189849853516, |
|
"learning_rate": 6.48965140519241e-05, |
|
"loss": 2.435, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.03200182867592434, |
|
"grad_norm": 28.83315086364746, |
|
"learning_rate": 6.434016163555452e-05, |
|
"loss": 3.0311, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.03223041316646665, |
|
"grad_norm": 23.669389724731445, |
|
"learning_rate": 6.378186779084995e-05, |
|
"loss": 2.4059, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.03245899765700897, |
|
"grad_norm": 13.909722328186035, |
|
"learning_rate": 6.322170810186012e-05, |
|
"loss": 1.7042, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.03268758214755129, |
|
"grad_norm": 14.179429054260254, |
|
"learning_rate": 6.26597584052401e-05, |
|
"loss": 3.3267, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.0329161666380936, |
|
"grad_norm": 17.85869026184082, |
|
"learning_rate": 6.209609477998338e-05, |
|
"loss": 5.1052, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.03314475112863592, |
|
"grad_norm": 23.01960563659668, |
|
"learning_rate": 6.153079353712201e-05, |
|
"loss": 6.2258, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.03337333561917824, |
|
"grad_norm": 17.689586639404297, |
|
"learning_rate": 6.096393120939516e-05, |
|
"loss": 5.5296, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.03360192010972055, |
|
"grad_norm": 15.267470359802246, |
|
"learning_rate": 6.0395584540887963e-05, |
|
"loss": 5.1839, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.03383050460026287, |
|
"grad_norm": 13.088833808898926, |
|
"learning_rate": 5.982583047664151e-05, |
|
"loss": 4.1851, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.03405908909080519, |
|
"grad_norm": 13.637064933776855, |
|
"learning_rate": 5.925474615223573e-05, |
|
"loss": 4.2342, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.0342876735813475, |
|
"grad_norm": 11.020079612731934, |
|
"learning_rate": 5.868240888334653e-05, |
|
"loss": 4.5138, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0342876735813475, |
|
"eval_loss": 1.670865774154663, |
|
"eval_runtime": 1333.0768, |
|
"eval_samples_per_second": 5.527, |
|
"eval_steps_per_second": 2.764, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.03451625807188982, |
|
"grad_norm": 15.293791770935059, |
|
"learning_rate": 5.810889615527838e-05, |
|
"loss": 4.6994, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.03474484256243214, |
|
"grad_norm": 9.839681625366211, |
|
"learning_rate": 5.753428561247416e-05, |
|
"loss": 3.8388, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.03497342705297445, |
|
"grad_norm": 7.527024745941162, |
|
"learning_rate": 5.695865504800327e-05, |
|
"loss": 3.2164, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.03520201154351677, |
|
"grad_norm": 8.136163711547852, |
|
"learning_rate": 5.6382082393029746e-05, |
|
"loss": 3.0072, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.03543059603405909, |
|
"grad_norm": 7.6138691902160645, |
|
"learning_rate": 5.5804645706261514e-05, |
|
"loss": 2.8556, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.0356591805246014, |
|
"grad_norm": 23.783597946166992, |
|
"learning_rate": 5.522642316338268e-05, |
|
"loss": 2.5473, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.03588776501514372, |
|
"grad_norm": 20.85479164123535, |
|
"learning_rate": 5.464749304646962e-05, |
|
"loss": 2.6145, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.03611634950568604, |
|
"grad_norm": 24.58341407775879, |
|
"learning_rate": 5.4067933733392915e-05, |
|
"loss": 2.7502, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.03634493399622835, |
|
"grad_norm": 30.31068992614746, |
|
"learning_rate": 5.348782368720626e-05, |
|
"loss": 1.2587, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.03657351848677067, |
|
"grad_norm": 9.1329984664917, |
|
"learning_rate": 5.290724144552379e-05, |
|
"loss": 0.1588, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.03680210297731299, |
|
"grad_norm": 30.12510108947754, |
|
"learning_rate": 5.232626560988735e-05, |
|
"loss": 0.4367, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.0370306874678553, |
|
"grad_norm": 21.02296257019043, |
|
"learning_rate": 5.174497483512506e-05, |
|
"loss": 0.4107, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.03725927195839762, |
|
"grad_norm": 39.78958511352539, |
|
"learning_rate": 5.116344781870281e-05, |
|
"loss": 2.3939, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.03748785644893994, |
|
"grad_norm": 89.31826782226562, |
|
"learning_rate": 5.0581763290069865e-05, |
|
"loss": 6.3699, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.03771644093948225, |
|
"grad_norm": 50.574974060058594, |
|
"learning_rate": 5e-05, |
|
"loss": 3.5648, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.03794502543002457, |
|
"grad_norm": 30.325014114379883, |
|
"learning_rate": 4.941823670993016e-05, |
|
"loss": 2.0516, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.03817360992056689, |
|
"grad_norm": 19.043434143066406, |
|
"learning_rate": 4.883655218129719e-05, |
|
"loss": 1.2083, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.0384021944111092, |
|
"grad_norm": 29.482452392578125, |
|
"learning_rate": 4.825502516487497e-05, |
|
"loss": 2.1119, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.03863077890165152, |
|
"grad_norm": 37.51374816894531, |
|
"learning_rate": 4.767373439011267e-05, |
|
"loss": 2.4501, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.03885936339219384, |
|
"grad_norm": 23.2747745513916, |
|
"learning_rate": 4.709275855447621e-05, |
|
"loss": 2.0485, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.03908794788273615, |
|
"grad_norm": 15.753173828125, |
|
"learning_rate": 4.6512176312793736e-05, |
|
"loss": 1.2318, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.03931653237327847, |
|
"grad_norm": 26.802000045776367, |
|
"learning_rate": 4.593206626660709e-05, |
|
"loss": 1.7534, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.03954511686382079, |
|
"grad_norm": 94.17913818359375, |
|
"learning_rate": 4.535250695353039e-05, |
|
"loss": 6.9058, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.03977370135436311, |
|
"grad_norm": 129.48439025878906, |
|
"learning_rate": 4.477357683661734e-05, |
|
"loss": 7.7345, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.04000228584490542, |
|
"grad_norm": 83.24310302734375, |
|
"learning_rate": 4.4195354293738484e-05, |
|
"loss": 5.7297, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.04023087033544774, |
|
"grad_norm": 116.52914428710938, |
|
"learning_rate": 4.361791760697027e-05, |
|
"loss": 4.7677, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.04045945482599006, |
|
"grad_norm": 39.86652374267578, |
|
"learning_rate": 4.3041344951996746e-05, |
|
"loss": 1.596, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.04068803931653237, |
|
"grad_norm": 14.975143432617188, |
|
"learning_rate": 4.246571438752585e-05, |
|
"loss": 1.2558, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.04091662380707469, |
|
"grad_norm": 86.05208587646484, |
|
"learning_rate": 4.1891103844721636e-05, |
|
"loss": 8.7519, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.04114520829761701, |
|
"grad_norm": 40.95692825317383, |
|
"learning_rate": 4.131759111665349e-05, |
|
"loss": 6.2332, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.04137379278815932, |
|
"grad_norm": 82.19147491455078, |
|
"learning_rate": 4.074525384776428e-05, |
|
"loss": 7.8078, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.04160237727870164, |
|
"grad_norm": 46.78995132446289, |
|
"learning_rate": 4.017416952335849e-05, |
|
"loss": 5.3358, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.04183096176924396, |
|
"grad_norm": 23.36475372314453, |
|
"learning_rate": 3.960441545911204e-05, |
|
"loss": 4.3148, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.04205954625978627, |
|
"grad_norm": 25.982982635498047, |
|
"learning_rate": 3.903606879060483e-05, |
|
"loss": 2.5694, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.04228813075032859, |
|
"grad_norm": 16.722654342651367, |
|
"learning_rate": 3.846920646287799e-05, |
|
"loss": 2.6081, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.04251671524087091, |
|
"grad_norm": 18.733665466308594, |
|
"learning_rate": 3.790390522001662e-05, |
|
"loss": 2.1414, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.04274529973141322, |
|
"grad_norm": 22.727380752563477, |
|
"learning_rate": 3.7340241594759916e-05, |
|
"loss": 3.6351, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.04297388422195554, |
|
"grad_norm": 16.99645233154297, |
|
"learning_rate": 3.67782918981399e-05, |
|
"loss": 2.4938, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.04320246871249786, |
|
"grad_norm": 27.48732566833496, |
|
"learning_rate": 3.6218132209150045e-05, |
|
"loss": 2.0775, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.04343105320304017, |
|
"grad_norm": 15.861635208129883, |
|
"learning_rate": 3.5659838364445505e-05, |
|
"loss": 2.5506, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.04365963769358249, |
|
"grad_norm": 25.20842933654785, |
|
"learning_rate": 3.51034859480759e-05, |
|
"loss": 2.6434, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.04388822218412481, |
|
"grad_norm": 28.7867374420166, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 4.0947, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.04411680667466712, |
|
"grad_norm": 16.924789428710938, |
|
"learning_rate": 3.399690641215142e-05, |
|
"loss": 4.607, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.04434539116520944, |
|
"grad_norm": 19.492359161376953, |
|
"learning_rate": 3.34468291057521e-05, |
|
"loss": 5.7136, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.04457397565575176, |
|
"grad_norm": 20.93442726135254, |
|
"learning_rate": 3.289899283371657e-05, |
|
"loss": 5.2217, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.04480256014629407, |
|
"grad_norm": 15.588977813720703, |
|
"learning_rate": 3.235347176430656e-05, |
|
"loss": 5.381, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.04503114463683639, |
|
"grad_norm": 13.808337211608887, |
|
"learning_rate": 3.1810339752342446e-05, |
|
"loss": 5.4432, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.04525972912737871, |
|
"grad_norm": 14.99952507019043, |
|
"learning_rate": 3.12696703292044e-05, |
|
"loss": 4.5804, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.04548831361792102, |
|
"grad_norm": 13.91661548614502, |
|
"learning_rate": 3.073153669287759e-05, |
|
"loss": 4.1034, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.04571689810846334, |
|
"grad_norm": 13.113777160644531, |
|
"learning_rate": 3.019601169804216e-05, |
|
"loss": 4.2222, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.04571689810846334, |
|
"eval_loss": 1.62479567527771, |
|
"eval_runtime": 1331.6429, |
|
"eval_samples_per_second": 5.533, |
|
"eval_steps_per_second": 2.767, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.04594548259900566, |
|
"grad_norm": 10.6249418258667, |
|
"learning_rate": 2.9663167846209998e-05, |
|
"loss": 3.5265, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.04617406708954797, |
|
"grad_norm": 8.47287368774414, |
|
"learning_rate": 2.9133077275909108e-05, |
|
"loss": 3.4879, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.04640265158009029, |
|
"grad_norm": 7.781945705413818, |
|
"learning_rate": 2.86058117529173e-05, |
|
"loss": 3.0601, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.04663123607063261, |
|
"grad_norm": 7.176991939544678, |
|
"learning_rate": 2.8081442660546125e-05, |
|
"loss": 2.6794, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.04685982056117492, |
|
"grad_norm": 6.956686496734619, |
|
"learning_rate": 2.7560040989976892e-05, |
|
"loss": 2.3741, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.04708840505171724, |
|
"grad_norm": 7.541036605834961, |
|
"learning_rate": 2.7041677330649407e-05, |
|
"loss": 2.4602, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.04731698954225956, |
|
"grad_norm": 26.17896270751953, |
|
"learning_rate": 2.6526421860705473e-05, |
|
"loss": 1.7172, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.04754557403280187, |
|
"grad_norm": 44.718421936035156, |
|
"learning_rate": 2.6014344337487707e-05, |
|
"loss": 1.8271, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.04777415852334419, |
|
"grad_norm": 23.585582733154297, |
|
"learning_rate": 2.5505514088095655e-05, |
|
"loss": 1.1668, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.04800274301388651, |
|
"grad_norm": 18.206127166748047, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 0.7469, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.04823132750442882, |
|
"grad_norm": 4.393265724182129, |
|
"learning_rate": 2.4497870511716235e-05, |
|
"loss": 0.1077, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.04845991199497114, |
|
"grad_norm": 4.106040954589844, |
|
"learning_rate": 2.399919360353923e-05, |
|
"loss": 0.2907, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.04868849648551346, |
|
"grad_norm": 13.828383445739746, |
|
"learning_rate": 2.350403678833976e-05, |
|
"loss": 0.8737, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.04891708097605577, |
|
"grad_norm": 59.581298828125, |
|
"learning_rate": 2.3012467102424373e-05, |
|
"loss": 4.8994, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.04914566546659809, |
|
"grad_norm": 52.219642639160156, |
|
"learning_rate": 2.25245510964597e-05, |
|
"loss": 4.3086, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.04937424995714041, |
|
"grad_norm": 57.01164627075195, |
|
"learning_rate": 2.2040354826462668e-05, |
|
"loss": 3.0226, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.04960283444768272, |
|
"grad_norm": 35.1441764831543, |
|
"learning_rate": 2.1559943844857422e-05, |
|
"loss": 1.8326, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.04983141893822504, |
|
"grad_norm": 23.194089889526367, |
|
"learning_rate": 2.1083383191600674e-05, |
|
"loss": 0.9515, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.05006000342876736, |
|
"grad_norm": 20.370725631713867, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 2.2338, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.05028858791930967, |
|
"grad_norm": 31.7033634185791, |
|
"learning_rate": 2.0142070414860704e-05, |
|
"loss": 2.838, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.05051717240985199, |
|
"grad_norm": 30.212299346923828, |
|
"learning_rate": 1.9677445730059346e-05, |
|
"loss": 3.5437, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.05074575690039431, |
|
"grad_norm": 24.48371124267578, |
|
"learning_rate": 1.9216926233717085e-05, |
|
"loss": 2.1516, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.05097434139093662, |
|
"grad_norm": 26.28635597229004, |
|
"learning_rate": 1.8760574272802e-05, |
|
"loss": 1.8107, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.05120292588147894, |
|
"grad_norm": 64.8018569946289, |
|
"learning_rate": 1.8308451630064484e-05, |
|
"loss": 3.8766, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.05143151037202126, |
|
"grad_norm": 79.22872161865234, |
|
"learning_rate": 1.7860619515673033e-05, |
|
"loss": 5.5396, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.05166009486256357, |
|
"grad_norm": 81.44451904296875, |
|
"learning_rate": 1.7417138558927244e-05, |
|
"loss": 5.3564, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.05188867935310589, |
|
"grad_norm": 74.99747467041016, |
|
"learning_rate": 1.6978068800049624e-05, |
|
"loss": 4.2999, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.05211726384364821, |
|
"grad_norm": 30.039758682250977, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 4.5715, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.05234584833419052, |
|
"grad_norm": 25.890417098999023, |
|
"learning_rate": 1.611340004271339e-05, |
|
"loss": 4.2147, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.05257443282473284, |
|
"grad_norm": 24.455820083618164, |
|
"learning_rate": 1.5687918106563326e-05, |
|
"loss": 3.6963, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.05280301731527516, |
|
"grad_norm": 13.776949882507324, |
|
"learning_rate": 1.526708147705013e-05, |
|
"loss": 2.5523, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.05303160180581747, |
|
"grad_norm": 14.489748001098633, |
|
"learning_rate": 1.4850947128716913e-05, |
|
"loss": 2.8481, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.05326018629635979, |
|
"grad_norm": 14.851402282714844, |
|
"learning_rate": 1.4439571399493146e-05, |
|
"loss": 2.9429, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.05348877078690211, |
|
"grad_norm": 14.959819793701172, |
|
"learning_rate": 1.4033009983067452e-05, |
|
"loss": 2.7784, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.05371735527744442, |
|
"grad_norm": 10.266538619995117, |
|
"learning_rate": 1.3631317921347563e-05, |
|
"loss": 2.1112, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.05394593976798674, |
|
"grad_norm": 13.498571395874023, |
|
"learning_rate": 1.3234549597008571e-05, |
|
"loss": 2.61, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.05417452425852906, |
|
"grad_norm": 13.794647216796875, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 2.655, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.05440310874907137, |
|
"grad_norm": 12.700047492980957, |
|
"learning_rate": 1.245599835092504e-05, |
|
"loss": 2.1982, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.05463169323961369, |
|
"grad_norm": 11.33856201171875, |
|
"learning_rate": 1.2074320832556556e-05, |
|
"loss": 1.8852, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.05486027773015601, |
|
"grad_norm": 12.628767013549805, |
|
"learning_rate": 1.1697777844051105e-05, |
|
"loss": 2.6147, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.05508886222069832, |
|
"grad_norm": 12.457136154174805, |
|
"learning_rate": 1.132642036330181e-05, |
|
"loss": 2.2797, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.05531744671124064, |
|
"grad_norm": 16.78705406188965, |
|
"learning_rate": 1.096029866616704e-05, |
|
"loss": 2.1772, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.05554603120178296, |
|
"grad_norm": 20.40379524230957, |
|
"learning_rate": 1.0599462319663905e-05, |
|
"loss": 4.4865, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.05577461569232527, |
|
"grad_norm": 47.940460205078125, |
|
"learning_rate": 1.0243960175257606e-05, |
|
"loss": 4.1248, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.05600320018286759, |
|
"grad_norm": 20.962236404418945, |
|
"learning_rate": 9.893840362247809e-06, |
|
"loss": 5.0609, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.05623178467340991, |
|
"grad_norm": 17.944730758666992, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 5.8087, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.05646036916395222, |
|
"grad_norm": 14.370197296142578, |
|
"learning_rate": 9.209936597791407e-06, |
|
"loss": 4.9449, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.05668895365449454, |
|
"grad_norm": 13.268957138061523, |
|
"learning_rate": 8.876245235966885e-06, |
|
"loss": 4.4975, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.05691753814503686, |
|
"grad_norm": 13.039083480834961, |
|
"learning_rate": 8.548121372247918e-06, |
|
"loss": 3.907, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.05714612263557917, |
|
"grad_norm": 14.2072172164917, |
|
"learning_rate": 8.225609429353187e-06, |
|
"loss": 3.7406, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.05714612263557917, |
|
"eval_loss": 0.5557529926300049, |
|
"eval_runtime": 1331.7964, |
|
"eval_samples_per_second": 5.532, |
|
"eval_steps_per_second": 2.766, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.05737470712612149, |
|
"grad_norm": 31.539043426513672, |
|
"learning_rate": 7.908753070237123e-06, |
|
"loss": 4.1344, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.05760329161666381, |
|
"grad_norm": 10.60650634765625, |
|
"learning_rate": 7.597595192178702e-06, |
|
"loss": 3.037, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.05783187610720612, |
|
"grad_norm": 5.5060715675354, |
|
"learning_rate": 7.292177920973725e-06, |
|
"loss": 2.9713, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.05806046059774844, |
|
"grad_norm": 5.154425144195557, |
|
"learning_rate": 6.992542605231739e-06, |
|
"loss": 2.2515, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.05828904508829076, |
|
"grad_norm": 5.6335129737854, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 2.3608, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.05851762957883307, |
|
"grad_norm": 6.264554023742676, |
|
"learning_rate": 6.410779315161886e-06, |
|
"loss": 2.2597, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.05874621406937539, |
|
"grad_norm": 9.82129955291748, |
|
"learning_rate": 6.128730102270897e-06, |
|
"loss": 1.9233, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.05897479855991771, |
|
"grad_norm": 12.5000581741333, |
|
"learning_rate": 5.852620357053651e-06, |
|
"loss": 2.1487, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.05920338305046002, |
|
"grad_norm": 17.345787048339844, |
|
"learning_rate": 5.582487460349805e-06, |
|
"loss": 2.1851, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.05943196754100234, |
|
"grad_norm": 15.88868236541748, |
|
"learning_rate": 5.318367983829392e-06, |
|
"loss": 0.7703, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.05966055203154466, |
|
"grad_norm": 18.099214553833008, |
|
"learning_rate": 5.060297685041659e-06, |
|
"loss": 0.5869, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.05988913652208697, |
|
"grad_norm": 11.634132385253906, |
|
"learning_rate": 4.8083115025739756e-06, |
|
"loss": 0.375, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.06011772101262929, |
|
"grad_norm": 12.680456161499023, |
|
"learning_rate": 4.562443551321788e-06, |
|
"loss": 0.6281, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.06034630550317161, |
|
"grad_norm": 10.923900604248047, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 0.5998, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.06057488999371392, |
|
"grad_norm": 33.76637268066406, |
|
"learning_rate": 4.089194655986306e-06, |
|
"loss": 2.3875, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.06080347448425624, |
|
"grad_norm": 29.129016876220703, |
|
"learning_rate": 3.861877782227885e-06, |
|
"loss": 2.1657, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.06103205897479856, |
|
"grad_norm": 25.44561767578125, |
|
"learning_rate": 3.6408072716606346e-06, |
|
"loss": 2.1342, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.061260643465340874, |
|
"grad_norm": 29.16153335571289, |
|
"learning_rate": 3.426013053692878e-06, |
|
"loss": 1.9011, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.06148922795588319, |
|
"grad_norm": 27.153125762939453, |
|
"learning_rate": 3.2175242080234313e-06, |
|
"loss": 1.852, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.06171781244642551, |
|
"grad_norm": 23.434755325317383, |
|
"learning_rate": 3.0153689607045845e-06, |
|
"loss": 2.194, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.061946396936967824, |
|
"grad_norm": 31.765398025512695, |
|
"learning_rate": 2.8195746803208244e-06, |
|
"loss": 2.348, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.06217498142751014, |
|
"grad_norm": 20.402849197387695, |
|
"learning_rate": 2.63016787428354e-06, |
|
"loss": 2.2186, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.06240356591805246, |
|
"grad_norm": 43.326622009277344, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 1.9633, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.06263215040859478, |
|
"grad_norm": 55.71512222290039, |
|
"learning_rate": 2.2706183876134045e-06, |
|
"loss": 2.5231, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.0628607348991371, |
|
"grad_norm": 34.0222282409668, |
|
"learning_rate": 2.100524384225555e-06, |
|
"loss": 2.5434, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.0630893193896794, |
|
"grad_norm": 30.28105354309082, |
|
"learning_rate": 1.9369152030840556e-06, |
|
"loss": 2.2744, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.06331790388022172, |
|
"grad_norm": 70.08159637451172, |
|
"learning_rate": 1.7798129942530551e-06, |
|
"loss": 2.8638, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.06354648837076404, |
|
"grad_norm": 30.30040740966797, |
|
"learning_rate": 1.6292390268568104e-06, |
|
"loss": 2.3145, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.06377507286130636, |
|
"grad_norm": 12.503889083862305, |
|
"learning_rate": 1.4852136862001764e-06, |
|
"loss": 1.9287, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.06400365735184868, |
|
"grad_norm": 10.321149826049805, |
|
"learning_rate": 1.3477564710088098e-06, |
|
"loss": 1.7086, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.064232241842391, |
|
"grad_norm": 13.305763244628906, |
|
"learning_rate": 1.2168859907892904e-06, |
|
"loss": 2.1641, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.0644608263329333, |
|
"grad_norm": 11.012710571289062, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 2.1994, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.06468941082347562, |
|
"grad_norm": 11.165496826171875, |
|
"learning_rate": 9.749752122010346e-07, |
|
"loss": 1.8865, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.06491799531401794, |
|
"grad_norm": 8.645009994506836, |
|
"learning_rate": 8.639676646793382e-07, |
|
"loss": 1.4106, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.06514657980456026, |
|
"grad_norm": 12.307551383972168, |
|
"learning_rate": 7.596123493895991e-07, |
|
"loss": 1.9273, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.06537516429510258, |
|
"grad_norm": 11.830700874328613, |
|
"learning_rate": 6.61923394371039e-07, |
|
"loss": 2.3673, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.0656037487856449, |
|
"grad_norm": 10.462738037109375, |
|
"learning_rate": 5.7091402514442e-07, |
|
"loss": 1.6951, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.0658323332761872, |
|
"grad_norm": 11.972997665405273, |
|
"learning_rate": 4.865965629214819e-07, |
|
"loss": 2.0444, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.06606091776672952, |
|
"grad_norm": 18.008352279663086, |
|
"learning_rate": 4.089824229369155e-07, |
|
"loss": 2.4141, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.06628950225727184, |
|
"grad_norm": 11.071226119995117, |
|
"learning_rate": 3.380821129028489e-07, |
|
"loss": 1.8623, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.06651808674781416, |
|
"grad_norm": 15.886430740356445, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 2.2248, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.06674667123835648, |
|
"grad_norm": 14.11369800567627, |
|
"learning_rate": 2.1646046750978254e-07, |
|
"loss": 3.1426, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.0669752557288988, |
|
"grad_norm": 17.461822509765625, |
|
"learning_rate": 1.657555977746972e-07, |
|
"loss": 3.738, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.0672038402194411, |
|
"grad_norm": 9.49361801147461, |
|
"learning_rate": 1.2179748700879012e-07, |
|
"loss": 3.9952, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.06743242470998342, |
|
"grad_norm": 11.396438598632812, |
|
"learning_rate": 8.459208643659122e-08, |
|
"loss": 4.4705, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.06766100920052574, |
|
"grad_norm": 10.36643123626709, |
|
"learning_rate": 5.4144433073771707e-08, |
|
"loss": 4.337, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.06788959369106806, |
|
"grad_norm": 11.684181213378906, |
|
"learning_rate": 3.04586490452119e-08, |
|
"loss": 4.1834, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.06811817818161038, |
|
"grad_norm": 8.283185958862305, |
|
"learning_rate": 1.3537941026914303e-08, |
|
"loss": 2.6821, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.0683467626721527, |
|
"grad_norm": 10.205315589904785, |
|
"learning_rate": 3.384599811889766e-09, |
|
"loss": 3.7379, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.068575347162695, |
|
"grad_norm": 11.798584938049316, |
|
"learning_rate": 0.0, |
|
"loss": 3.3263, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.068575347162695, |
|
"eval_loss": 0.530606210231781, |
|
"eval_runtime": 1331.9933, |
|
"eval_samples_per_second": 5.532, |
|
"eval_steps_per_second": 2.766, |
|
"step": 300 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 300, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 6.686338196994785e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |