{
  "best_metric": 11.01180648803711,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 1.0066445182724253,
  "eval_steps": 100,
  "global_step": 113,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008859357696566999,
      "grad_norm": 1.7000117301940918,
      "learning_rate": 2e-05,
      "loss": 88.7471,
      "step": 1
    },
    {
      "epoch": 0.008859357696566999,
      "eval_loss": 11.089943885803223,
      "eval_runtime": 1.0596,
      "eval_samples_per_second": 179.311,
      "eval_steps_per_second": 45.3,
      "step": 1
    },
    {
      "epoch": 0.017718715393133997,
      "grad_norm": 1.9952071905136108,
      "learning_rate": 4e-05,
      "loss": 88.7374,
      "step": 2
    },
    {
      "epoch": 0.026578073089700997,
      "grad_norm": 1.8046919107437134,
      "learning_rate": 6e-05,
      "loss": 88.7934,
      "step": 3
    },
    {
      "epoch": 0.035437430786267994,
      "grad_norm": 1.8296611309051514,
      "learning_rate": 8e-05,
      "loss": 88.7156,
      "step": 4
    },
    {
      "epoch": 0.044296788482835,
      "grad_norm": 1.87436044216156,
      "learning_rate": 0.0001,
      "loss": 88.7446,
      "step": 5
    },
    {
      "epoch": 0.053156146179401995,
      "grad_norm": 1.8545674085617065,
      "learning_rate": 0.00012,
      "loss": 88.8033,
      "step": 6
    },
    {
      "epoch": 0.06201550387596899,
      "grad_norm": 1.7815138101577759,
      "learning_rate": 0.00014,
      "loss": 88.7283,
      "step": 7
    },
    {
      "epoch": 0.07087486157253599,
      "grad_norm": 2.0521814823150635,
      "learning_rate": 0.00016,
      "loss": 88.7129,
      "step": 8
    },
    {
      "epoch": 0.07973421926910298,
      "grad_norm": 1.9217674732208252,
      "learning_rate": 0.00018,
      "loss": 88.659,
      "step": 9
    },
    {
      "epoch": 0.08859357696567,
      "grad_norm": 1.9446423053741455,
      "learning_rate": 0.0002,
      "loss": 88.6336,
      "step": 10
    },
    {
      "epoch": 0.09745293466223699,
      "grad_norm": 1.8381359577178955,
      "learning_rate": 0.00019995348836233516,
      "loss": 88.7198,
      "step": 11
    },
    {
      "epoch": 0.10631229235880399,
      "grad_norm": 1.9090080261230469,
      "learning_rate": 0.00019981399671598939,
      "loss": 88.6303,
      "step": 12
    },
    {
      "epoch": 0.11517165005537099,
      "grad_norm": 1.8912723064422607,
      "learning_rate": 0.00019958165482066094,
      "loss": 88.6177,
      "step": 13
    },
    {
      "epoch": 0.12403100775193798,
      "grad_norm": 1.9282634258270264,
      "learning_rate": 0.0001992566788083908,
      "loss": 88.5971,
      "step": 14
    },
    {
      "epoch": 0.132890365448505,
      "grad_norm": 1.6373441219329834,
      "learning_rate": 0.00019883937098250963,
      "loss": 88.6673,
      "step": 15
    },
    {
      "epoch": 0.14174972314507198,
      "grad_norm": 1.9328405857086182,
      "learning_rate": 0.00019833011953642525,
      "loss": 88.632,
      "step": 16
    },
    {
      "epoch": 0.150609080841639,
      "grad_norm": 2.005837917327881,
      "learning_rate": 0.0001977293981925125,
      "loss": 88.6044,
      "step": 17
    },
    {
      "epoch": 0.15946843853820597,
      "grad_norm": 1.9346294403076172,
      "learning_rate": 0.00019703776576144105,
      "loss": 88.6249,
      "step": 18
    },
    {
      "epoch": 0.16832779623477298,
      "grad_norm": 1.9649502038955688,
      "learning_rate": 0.0001962558656223516,
      "loss": 88.5367,
      "step": 19
    },
    {
      "epoch": 0.17718715393134,
      "grad_norm": 1.8136121034622192,
      "learning_rate": 0.00019538442512436328,
      "loss": 88.5989,
      "step": 20
    },
    {
      "epoch": 0.18604651162790697,
      "grad_norm": 2.0163049697875977,
      "learning_rate": 0.00019442425490996988,
      "loss": 88.6022,
      "step": 21
    },
    {
      "epoch": 0.19490586932447398,
      "grad_norm": 1.772336483001709,
      "learning_rate": 0.00019337624816095358,
      "loss": 88.5585,
      "step": 22
    },
    {
      "epoch": 0.20376522702104097,
      "grad_norm": 1.5951476097106934,
      "learning_rate": 0.00019224137976751795,
      "loss": 88.5738,
      "step": 23
    },
    {
      "epoch": 0.21262458471760798,
      "grad_norm": 1.623684287071228,
      "learning_rate": 0.00019102070542141328,
      "loss": 88.5241,
      "step": 24
    },
    {
      "epoch": 0.22148394241417496,
      "grad_norm": 1.7089464664459229,
      "learning_rate": 0.00018971536063389744,
      "loss": 88.5202,
      "step": 25
    },
    {
      "epoch": 0.23034330011074197,
      "grad_norm": 1.8459341526031494,
      "learning_rate": 0.00018832655967944607,
      "loss": 88.4915,
      "step": 26
    },
    {
      "epoch": 0.23920265780730898,
      "grad_norm": 1.7073566913604736,
      "learning_rate": 0.0001868555944661949,
      "loss": 88.4679,
      "step": 27
    },
    {
      "epoch": 0.24806201550387597,
      "grad_norm": 1.6638760566711426,
      "learning_rate": 0.00018530383333416418,
      "loss": 88.5159,
      "step": 28
    },
    {
      "epoch": 0.25692137320044295,
      "grad_norm": 1.7694356441497803,
      "learning_rate": 0.0001836727197823842,
      "loss": 88.4419,
      "step": 29
    },
    {
      "epoch": 0.26578073089701,
      "grad_norm": 1.5755650997161865,
      "learning_rate": 0.00018196377112610526,
      "loss": 88.5145,
      "step": 30
    },
    {
      "epoch": 0.27464008859357697,
      "grad_norm": 1.6858540773391724,
      "learning_rate": 0.00018017857708534107,
      "loss": 88.4256,
      "step": 31
    },
    {
      "epoch": 0.28349944629014395,
      "grad_norm": 1.6921030282974243,
      "learning_rate": 0.00017831879830605937,
      "loss": 88.4547,
      "step": 32
    },
    {
      "epoch": 0.292358803986711,
      "grad_norm": 1.6553444862365723,
      "learning_rate": 0.0001763861648153945,
      "loss": 88.444,
      "step": 33
    },
    {
      "epoch": 0.301218161683278,
      "grad_norm": 1.6478825807571411,
      "learning_rate": 0.0001743824744123196,
      "loss": 88.4493,
      "step": 34
    },
    {
      "epoch": 0.31007751937984496,
      "grad_norm": 1.6460453271865845,
      "learning_rate": 0.00017230959099527512,
      "loss": 88.4299,
      "step": 35
    },
    {
      "epoch": 0.31893687707641194,
      "grad_norm": 1.63612699508667,
      "learning_rate": 0.00017016944282830933,
      "loss": 88.3685,
      "step": 36
    },
    {
      "epoch": 0.327796234772979,
      "grad_norm": 1.7421164512634277,
      "learning_rate": 0.00016796402074734402,
      "loss": 88.4323,
      "step": 37
    },
    {
      "epoch": 0.33665559246954596,
      "grad_norm": 1.6525160074234009,
      "learning_rate": 0.00016569537630823383,
      "loss": 88.4168,
      "step": 38
    },
    {
      "epoch": 0.34551495016611294,
      "grad_norm": 1.7259595394134521,
      "learning_rate": 0.00016336561987834153,
      "loss": 88.3543,
      "step": 39
    },
    {
      "epoch": 0.35437430786268,
      "grad_norm": 1.5089702606201172,
      "learning_rate": 0.00016097691867340545,
      "loss": 88.3855,
      "step": 40
    },
    {
      "epoch": 0.36323366555924697,
      "grad_norm": 1.7765790224075317,
      "learning_rate": 0.00015853149474152423,
      "loss": 88.4514,
      "step": 41
    },
    {
      "epoch": 0.37209302325581395,
      "grad_norm": 1.8243776559829712,
      "learning_rate": 0.00015603162289613503,
      "loss": 88.4046,
      "step": 42
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 1.8020275831222534,
      "learning_rate": 0.00015347962859990744,
      "loss": 88.3666,
      "step": 43
    },
    {
      "epoch": 0.38981173864894797,
      "grad_norm": 1.634013295173645,
      "learning_rate": 0.00015087788580152206,
      "loss": 88.4025,
      "step": 44
    },
    {
      "epoch": 0.39867109634551495,
      "grad_norm": 1.8167768716812134,
      "learning_rate": 0.00014822881472734562,
      "loss": 88.3001,
      "step": 45
    },
    {
      "epoch": 0.40753045404208194,
      "grad_norm": 1.8726773262023926,
      "learning_rate": 0.0001455348796300571,
      "loss": 88.2561,
      "step": 46
    },
    {
      "epoch": 0.416389811738649,
      "grad_norm": 1.6560800075531006,
      "learning_rate": 0.0001427985864963193,
      "loss": 88.3241,
      "step": 47
    },
    {
      "epoch": 0.42524916943521596,
      "grad_norm": 1.8344228267669678,
      "learning_rate": 0.0001400224807156278,
      "loss": 88.3057,
      "step": 48
    },
    {
      "epoch": 0.43410852713178294,
      "grad_norm": 1.7484591007232666,
      "learning_rate": 0.00013720914471250644,
      "loss": 88.3512,
      "step": 49
    },
    {
      "epoch": 0.4429678848283499,
      "grad_norm": 1.8629359006881714,
      "learning_rate": 0.00013436119554425133,
      "loss": 88.2738,
      "step": 50
    },
    {
      "epoch": 0.45182724252491696,
      "grad_norm": 1.7476680278778076,
      "learning_rate": 0.0001314812824664585,
      "loss": 88.2808,
      "step": 51
    },
    {
      "epoch": 0.46068660022148394,
      "grad_norm": 1.9814788103103638,
      "learning_rate": 0.0001285720844685996,
      "loss": 88.268,
      "step": 52
    },
    {
      "epoch": 0.4695459579180509,
      "grad_norm": 1.8949189186096191,
      "learning_rate": 0.00012563630778193805,
      "loss": 88.2733,
      "step": 53
    },
    {
      "epoch": 0.47840531561461797,
      "grad_norm": 1.5467251539230347,
      "learning_rate": 0.00012267668336210413,
      "loss": 88.3703,
      "step": 54
    },
    {
      "epoch": 0.48726467331118495,
      "grad_norm": 1.8193094730377197,
      "learning_rate": 0.00011969596434867063,
      "loss": 88.2722,
      "step": 55
    },
    {
      "epoch": 0.49612403100775193,
      "grad_norm": 1.8863904476165771,
      "learning_rate": 0.00011669692350409223,
      "loss": 88.2492,
      "step": 56
    },
    {
      "epoch": 0.5049833887043189,
      "grad_norm": 1.7994129657745361,
      "learning_rate": 0.00011368235063439103,
      "loss": 88.1733,
      "step": 57
    },
    {
      "epoch": 0.5138427464008859,
      "grad_norm": 1.7645139694213867,
      "learning_rate": 0.00011065504999398762,
      "loss": 88.2902,
      "step": 58
    },
    {
      "epoch": 0.5227021040974529,
      "grad_norm": 1.7159887552261353,
      "learning_rate": 0.00010761783767709182,
      "loss": 88.2566,
      "step": 59
    },
    {
      "epoch": 0.53156146179402,
      "grad_norm": 1.5432287454605103,
      "learning_rate": 0.00010457353899807946,
      "loss": 88.2397,
      "step": 60
    },
    {
      "epoch": 0.540420819490587,
      "grad_norm": 1.6790579557418823,
      "learning_rate": 0.0001015249858632926,
      "loss": 88.2717,
      "step": 61
    },
    {
      "epoch": 0.5492801771871539,
      "grad_norm": 1.6221708059310913,
      "learning_rate": 9.847501413670742e-05,
      "loss": 88.1482,
      "step": 62
    },
    {
      "epoch": 0.5581395348837209,
      "grad_norm": 1.6908060312271118,
      "learning_rate": 9.542646100192056e-05,
      "loss": 88.1413,
      "step": 63
    },
    {
      "epoch": 0.5669988925802879,
      "grad_norm": 1.5770460367202759,
      "learning_rate": 9.238216232290822e-05,
      "loss": 88.2858,
      "step": 64
    },
    {
      "epoch": 0.5758582502768549,
      "grad_norm": 1.6058683395385742,
      "learning_rate": 8.93449500060124e-05,
      "loss": 88.2268,
      "step": 65
    },
    {
      "epoch": 0.584717607973422,
      "grad_norm": 1.5394655466079712,
      "learning_rate": 8.6317649365609e-05,
      "loss": 88.2221,
      "step": 66
    },
    {
      "epoch": 0.593576965669989,
      "grad_norm": 1.5921432971954346,
      "learning_rate": 8.33030764959078e-05,
      "loss": 88.1241,
      "step": 67
    },
    {
      "epoch": 0.602436323366556,
      "grad_norm": 1.553252935409546,
      "learning_rate": 8.030403565132942e-05,
      "loss": 88.1573,
      "step": 68
    },
    {
      "epoch": 0.6112956810631229,
      "grad_norm": 1.7506428956985474,
      "learning_rate": 7.732331663789592e-05,
      "loss": 88.2591,
      "step": 69
    },
    {
      "epoch": 0.6201550387596899,
      "grad_norm": 1.4636502265930176,
      "learning_rate": 7.436369221806201e-05,
      "loss": 88.2353,
      "step": 70
    },
    {
      "epoch": 0.6290143964562569,
      "grad_norm": 1.5952634811401367,
      "learning_rate": 7.142791553140045e-05,
      "loss": 88.1222,
      "step": 71
    },
    {
      "epoch": 0.6378737541528239,
      "grad_norm": 1.4096498489379883,
      "learning_rate": 6.851871753354153e-05,
      "loss": 88.1584,
      "step": 72
    },
    {
      "epoch": 0.646733111849391,
      "grad_norm": 1.4336620569229126,
      "learning_rate": 6.563880445574873e-05,
      "loss": 88.2297,
      "step": 73
    },
    {
      "epoch": 0.655592469545958,
      "grad_norm": 1.530969500541687,
      "learning_rate": 6.279085528749359e-05,
      "loss": 88.0665,
      "step": 74
    },
    {
      "epoch": 0.6644518272425249,
      "grad_norm": 1.375839114189148,
      "learning_rate": 5.9977519284372194e-05,
      "loss": 88.1804,
      "step": 75
    },
    {
      "epoch": 0.6733111849390919,
      "grad_norm": 1.5607173442840576,
      "learning_rate": 5.720141350368072e-05,
      "loss": 88.1716,
      "step": 76
    },
    {
      "epoch": 0.6821705426356589,
      "grad_norm": 1.380002498626709,
      "learning_rate": 5.446512036994287e-05,
      "loss": 88.2189,
      "step": 77
    },
    {
      "epoch": 0.6910299003322259,
      "grad_norm": 1.3200513124465942,
      "learning_rate": 5.177118527265438e-05,
      "loss": 88.1267,
      "step": 78
    },
    {
      "epoch": 0.6998892580287929,
      "grad_norm": 1.3193778991699219,
      "learning_rate": 4.912211419847794e-05,
      "loss": 88.1561,
      "step": 79
    },
    {
      "epoch": 0.70874861572536,
      "grad_norm": 1.5167840719223022,
      "learning_rate": 4.652037140009259e-05,
      "loss": 88.1425,
      "step": 80
    },
    {
      "epoch": 0.717607973421927,
      "grad_norm": 1.4421601295471191,
      "learning_rate": 4.3968377103865024e-05,
      "loss": 88.1163,
      "step": 81
    },
    {
      "epoch": 0.7264673311184939,
      "grad_norm": 1.406391978263855,
      "learning_rate": 4.146850525847579e-05,
      "loss": 88.1702,
      "step": 82
    },
    {
      "epoch": 0.7353266888150609,
      "grad_norm": 1.4038128852844238,
      "learning_rate": 3.902308132659457e-05,
      "loss": 88.0944,
      "step": 83
    },
    {
      "epoch": 0.7441860465116279,
      "grad_norm": 1.3674670457839966,
      "learning_rate": 3.663438012165848e-05,
      "loss": 88.1737,
      "step": 84
    },
    {
      "epoch": 0.7530454042081949,
      "grad_norm": 1.3968989849090576,
      "learning_rate": 3.430462369176619e-05,
      "loss": 88.1595,
      "step": 85
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 1.4933903217315674,
      "learning_rate": 3.203597925265598e-05,
      "loss": 88.2609,
      "step": 86
    },
    {
      "epoch": 0.770764119601329,
      "grad_norm": 1.5437737703323364,
      "learning_rate": 2.98305571716907e-05,
      "loss": 88.1411,
      "step": 87
    },
    {
      "epoch": 0.7796234772978959,
      "grad_norm": 1.2470422983169556,
      "learning_rate": 2.769040900472488e-05,
      "loss": 88.1128,
      "step": 88
    },
    {
      "epoch": 0.7884828349944629,
      "grad_norm": 1.2969799041748047,
      "learning_rate": 2.5617525587680402e-05,
      "loss": 88.2228,
      "step": 89
    },
    {
      "epoch": 0.7973421926910299,
      "grad_norm": 1.4092624187469482,
      "learning_rate": 2.3613835184605525e-05,
      "loss": 88.2412,
      "step": 90
    },
    {
      "epoch": 0.8062015503875969,
      "grad_norm": 1.2208858728408813,
      "learning_rate": 2.1681201693940668e-05,
      "loss": 88.0666,
      "step": 91
    },
    {
      "epoch": 0.8150609080841639,
      "grad_norm": 1.3910318613052368,
      "learning_rate": 1.982142291465896e-05,
      "loss": 88.1084,
      "step": 92
    },
    {
      "epoch": 0.8239202657807309,
      "grad_norm": 1.4518864154815674,
      "learning_rate": 1.8036228873894746e-05,
      "loss": 88.1844,
      "step": 93
    },
    {
      "epoch": 0.832779623477298,
      "grad_norm": 1.4225211143493652,
      "learning_rate": 1.6327280217615792e-05,
      "loss": 88.1233,
      "step": 94
    },
    {
      "epoch": 0.8416389811738649,
      "grad_norm": 1.3186362981796265,
      "learning_rate": 1.4696166665835853e-05,
      "loss": 88.1479,
      "step": 95
    },
    {
      "epoch": 0.8504983388704319,
      "grad_norm": 1.3430944681167603,
      "learning_rate": 1.3144405533805138e-05,
      "loss": 88.077,
      "step": 96
    },
    {
      "epoch": 0.8593576965669989,
      "grad_norm": 1.212870478630066,
      "learning_rate": 1.167344032055394e-05,
      "loss": 88.107,
      "step": 97
    },
    {
      "epoch": 0.8682170542635659,
      "grad_norm": 1.360379934310913,
      "learning_rate": 1.02846393661026e-05,
      "loss": 88.0353,
      "step": 98
    },
    {
      "epoch": 0.8770764119601329,
      "grad_norm": 1.258919596672058,
      "learning_rate": 8.979294578586738e-06,
      "loss": 88.0631,
      "step": 99
    },
    {
      "epoch": 0.8859357696566998,
      "grad_norm": 1.4021327495574951,
      "learning_rate": 7.758620232482084e-06,
      "loss": 88.0937,
      "step": 100
    },
    {
      "epoch": 0.8859357696566998,
      "eval_loss": 11.01180648803711,
      "eval_runtime": 0.6808,
      "eval_samples_per_second": 279.069,
      "eval_steps_per_second": 70.502,
      "step": 100
    },
    {
      "epoch": 0.8947951273532669,
      "grad_norm": 1.4452682733535767,
      "learning_rate": 6.623751839046455e-06,
      "loss": 88.1532,
      "step": 101
    },
    {
      "epoch": 0.9036544850498339,
      "grad_norm": 1.3395559787750244,
      "learning_rate": 5.575745090030138e-06,
      "loss": 88.1788,
      "step": 102
    },
    {
      "epoch": 0.9125138427464009,
      "grad_norm": 1.4853826761245728,
      "learning_rate": 4.61557487563673e-06,
      "loss": 88.1785,
      "step": 103
    },
    {
      "epoch": 0.9213732004429679,
      "grad_norm": 1.385549545288086,
      "learning_rate": 3.7441343776484117e-06,
      "loss": 88.1602,
      "step": 104
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 1.4303137063980103,
      "learning_rate": 2.9622342385589254e-06,
      "loss": 88.0615,
      "step": 105
    },
    {
      "epoch": 0.9390919158361019,
      "grad_norm": 1.3606700897216797,
      "learning_rate": 2.2706018074875045e-06,
      "loss": 88.1822,
      "step": 106
    },
    {
      "epoch": 0.9479512735326688,
      "grad_norm": 1.3698482513427734,
      "learning_rate": 1.6698804635747579e-06,
      "loss": 88.1556,
      "step": 107
    },
    {
      "epoch": 0.9568106312292359,
      "grad_norm": 1.4697903394699097,
      "learning_rate": 1.160629017490389e-06,
      "loss": 88.1125,
      "step": 108
    },
    {
      "epoch": 0.9656699889258029,
      "grad_norm": 1.2570511102676392,
      "learning_rate": 7.433211916092142e-07,
      "loss": 88.0736,
      "step": 109
    },
    {
      "epoch": 0.9745293466223699,
      "grad_norm": 1.4869191646575928,
      "learning_rate": 4.1834517933907467e-07,
      "loss": 88.137,
      "step": 110
    },
    {
      "epoch": 0.9833887043189369,
      "grad_norm": 1.265148401260376,
      "learning_rate": 1.8600328401061629e-07,
      "loss": 88.1054,
      "step": 111
    },
    {
      "epoch": 0.9922480620155039,
      "grad_norm": 1.3339345455169678,
      "learning_rate": 4.651163766484779e-08,
      "loss": 88.0929,
      "step": 112
    },
    {
      "epoch": 1.0066445182724253,
      "grad_norm": 1.3491634130477905,
      "learning_rate": 0.0,
      "loss": 88.1079,
      "step": 113
    }
  ],
  "logging_steps": 1,
  "max_steps": 113,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 100,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 2,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1692024176640.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}