{"current_steps": 5, "total_steps": 354, "loss": 4.1473, "learning_rate": 4.9975392245612254e-05, "epoch": 0.042283298097251586, "percentage": 1.41, "elapsed_time": "0:00:46", "remaining_time": "0:53:44", "throughput": 1480.12, "total_tokens": 68384} |
|
{"current_steps": 10, "total_steps": 354, "loss": 4.021, "learning_rate": 4.9901617425775067e-05, "epoch": 0.08456659619450317, "percentage": 2.82, "elapsed_time": "0:01:28", "remaining_time": "0:50:54", "throughput": 1444.14, "total_tokens": 128224} |
|
{"current_steps": 15, "total_steps": 354, "loss": 3.8212, "learning_rate": 4.9778820775100185e-05, "epoch": 0.12684989429175475, "percentage": 4.24, "elapsed_time": "0:02:20", "remaining_time": "0:53:00", "throughput": 1405.31, "total_tokens": 197760} |
|
{"current_steps": 20, "total_steps": 354, "loss": 3.7653, "learning_rate": 4.9607244033573156e-05, "epoch": 0.16913319238900634, "percentage": 5.65, "elapsed_time": "0:03:00", "remaining_time": "0:50:08", "throughput": 1458.76, "total_tokens": 262832} |
|
{"current_steps": 25, "total_steps": 354, "loss": 3.7434, "learning_rate": 4.93872249706591e-05, "epoch": 0.21141649048625794, "percentage": 7.06, "elapsed_time": "0:03:50", "remaining_time": "0:50:36", "throughput": 1445.04, "total_tokens": 333472} |
|
{"current_steps": 30, "total_steps": 354, "loss": 3.5488, "learning_rate": 4.91191967203629e-05, "epoch": 0.2536997885835095, "percentage": 8.47, "elapsed_time": "0:04:31", "remaining_time": "0:48:47", "throughput": 1452.21, "total_tokens": 393616} |
|
{"current_steps": 35, "total_steps": 354, "loss": 3.5732, "learning_rate": 4.8803686928552736e-05, "epoch": 0.2959830866807611, "percentage": 9.89, "elapsed_time": "0:05:13", "remaining_time": "0:47:41", "throughput": 1459.49, "total_tokens": 458240} |
|
{"current_steps": 40, "total_steps": 354, "loss": 3.7108, "learning_rate": 4.84413167142257e-05, "epoch": 0.3382663847780127, "percentage": 11.3, "elapsed_time": "0:05:59", "remaining_time": "0:46:58", "throughput": 1460.9, "total_tokens": 524576} |
|
{"current_steps": 45, "total_steps": 354, "loss": 3.6871, "learning_rate": 4.803279944676032e-05, "epoch": 0.38054968287526425, "percentage": 12.71, "elapsed_time": "0:06:40", "remaining_time": "0:45:50", "throughput": 1477.59, "total_tokens": 591856} |
|
{"current_steps": 50, "total_steps": 354, "loss": 3.286, "learning_rate": 4.7578939341563095e-05, "epoch": 0.42283298097251587, "percentage": 14.12, "elapsed_time": "0:07:28", "remaining_time": "0:45:25", "throughput": 1462.87, "total_tokens": 655648} |
|
{"current_steps": 55, "total_steps": 354, "loss": 3.5377, "learning_rate": 4.70806298768736e-05, "epoch": 0.46511627906976744, "percentage": 15.54, "elapsed_time": "0:08:12", "remaining_time": "0:44:37", "throughput": 1464.28, "total_tokens": 721280} |
|
{"current_steps": 60, "total_steps": 354, "loss": 3.56, "learning_rate": 4.653885203484515e-05, "epoch": 0.507399577167019, "percentage": 16.95, "elapsed_time": "0:08:57", "remaining_time": "0:43:56", "throughput": 1458.6, "total_tokens": 784688} |
|
{"current_steps": 65, "total_steps": 354, "loss": 3.4937, "learning_rate": 4.595467237036329e-05, "epoch": 0.5496828752642706, "percentage": 18.36, "elapsed_time": "0:09:39", "remaining_time": "0:42:55", "throughput": 1468.84, "total_tokens": 850848} |
|
{"current_steps": 70, "total_steps": 354, "loss": 3.3823, "learning_rate": 4.532924091140417e-05, "epoch": 0.5919661733615222, "percentage": 19.77, "elapsed_time": "0:10:19", "remaining_time": "0:41:52", "throughput": 1473.43, "total_tokens": 912480} |
|
{"current_steps": 75, "total_steps": 354, "loss": 3.3798, "learning_rate": 4.466378889506607e-05, "epoch": 0.6342494714587738, "percentage": 21.19, "elapsed_time": "0:11:00", "remaining_time": "0:40:57", "throughput": 1481.07, "total_tokens": 978448} |
|
{"current_steps": 80, "total_steps": 354, "loss": 3.3044, "learning_rate": 4.395962634373097e-05, "epoch": 0.6765327695560254, "percentage": 22.6, "elapsed_time": "0:11:41", "remaining_time": "0:40:03", "throughput": 1483.67, "total_tokens": 1041280} |
|
{"current_steps": 85, "total_steps": 354, "loss": 3.3661, "learning_rate": 4.3218139486127854e-05, "epoch": 0.718816067653277, "percentage": 24.01, "elapsed_time": "0:12:21", "remaining_time": "0:39:06", "throughput": 1486.88, "total_tokens": 1102224} |
|
{"current_steps": 90, "total_steps": 354, "loss": 3.3829, "learning_rate": 4.2440788028374624e-05, "epoch": 0.7610993657505285, "percentage": 25.42, "elapsed_time": "0:13:03", "remaining_time": "0:38:17", "throughput": 1489.19, "total_tokens": 1166576} |
|
{"current_steps": 95, "total_steps": 354, "loss": 3.2241, "learning_rate": 4.1629102280370904e-05, "epoch": 0.8033826638477801, "percentage": 26.84, "elapsed_time": "0:13:51", "remaining_time": "0:37:47", "throughput": 1479.23, "total_tokens": 1230096} |
|
{"current_steps": 100, "total_steps": 354, "loss": 3.0931, "learning_rate": 4.0784680143198836e-05, "epoch": 0.8456659619450317, "percentage": 28.25, "elapsed_time": "0:14:41", "remaining_time": "0:37:19", "throughput": 1471.86, "total_tokens": 1297968} |
|
{"current_steps": 105, "total_steps": 354, "loss": 3.3581, "learning_rate": 3.990918396346254e-05, "epoch": 0.8879492600422833, "percentage": 29.66, "elapsed_time": "0:15:22", "remaining_time": "0:36:28", "throughput": 1475.74, "total_tokens": 1361760} |
|
{"current_steps": 110, "total_steps": 354, "loss": 3.2308, "learning_rate": 3.900433726075865e-05, "epoch": 0.9302325581395349, "percentage": 31.07, "elapsed_time": "0:16:00", "remaining_time": "0:35:29", "throughput": 1481.17, "total_tokens": 1422096} |
|
{"current_steps": 115, "total_steps": 354, "loss": 3.1427, "learning_rate": 3.8071921334720696e-05, "epoch": 0.9725158562367865, "percentage": 32.49, "elapsed_time": "0:16:50", "remaining_time": "0:35:00", "throughput": 1475.67, "total_tokens": 1491120} |
|
{"current_steps": 120, "total_steps": 354, "loss": 3.0988, "learning_rate": 3.711377175831626e-05, "epoch": 1.014799154334038, "percentage": 33.9, "elapsed_time": "0:17:33", "remaining_time": "0:34:13", "throughput": 1477.74, "total_tokens": 1556440} |
|
{"current_steps": 125, "total_steps": 354, "loss": 3.0695, "learning_rate": 3.613177476430079e-05, "epoch": 1.0570824524312896, "percentage": 35.31, "elapsed_time": "0:18:23", "remaining_time": "0:33:40", "throughput": 1472.44, "total_tokens": 1624200} |
|
{"current_steps": 130, "total_steps": 354, "loss": 3.1109, "learning_rate": 3.512786353194134e-05, "epoch": 1.0993657505285412, "percentage": 36.72, "elapsed_time": "0:19:12", "remaining_time": "0:33:06", "throughput": 1463.05, "total_tokens": 1686600} |
|
{"current_steps": 135, "total_steps": 354, "loss": 3.116, "learning_rate": 3.410401438132056e-05, "epoch": 1.1416490486257929, "percentage": 38.14, "elapsed_time": "0:19:53", "remaining_time": "0:32:16", "throughput": 1470.58, "total_tokens": 1755144} |
|
{"current_steps": 140, "total_steps": 354, "loss": 2.9528, "learning_rate": 3.3062242882712724e-05, "epoch": 1.1839323467230445, "percentage": 39.55, "elapsed_time": "0:20:36", "remaining_time": "0:31:29", "throughput": 1471.79, "total_tokens": 1819672} |
|
{"current_steps": 145, "total_steps": 354, "loss": 2.8494, "learning_rate": 3.200459988869111e-05, "epoch": 1.226215644820296, "percentage": 40.96, "elapsed_time": "0:21:23", "remaining_time": "0:30:50", "throughput": 1469.07, "total_tokens": 1886136} |
|
{"current_steps": 150, "total_steps": 354, "loss": 3.1525, "learning_rate": 3.093316749677788e-05, "epoch": 1.2684989429175475, "percentage": 42.37, "elapsed_time": "0:22:03", "remaining_time": "0:29:59", "throughput": 1471.8, "total_tokens": 1947656} |
|
{"current_steps": 155, "total_steps": 354, "loss": 2.8991, "learning_rate": 2.985005495058446e-05, "epoch": 1.3107822410147991, "percentage": 43.79, "elapsed_time": "0:22:45", "remaining_time": "0:29:12", "throughput": 1474.89, "total_tokens": 2013272} |
|
{"current_steps": 160, "total_steps": 354, "loss": 3.026, "learning_rate": 2.875739448751176e-05, "epoch": 1.3530655391120507, "percentage": 45.2, "elapsed_time": "0:23:28", "remaining_time": "0:28:28", "throughput": 1476.25, "total_tokens": 2079816} |
|
{"current_steps": 165, "total_steps": 354, "loss": 2.8813, "learning_rate": 2.7657337141184138e-05, "epoch": 1.3953488372093024, "percentage": 46.61, "elapsed_time": "0:24:10", "remaining_time": "0:27:41", "throughput": 1479.42, "total_tokens": 2146008} |
|
{"current_steps": 170, "total_steps": 354, "loss": 2.7721, "learning_rate": 2.655204850688085e-05, "epoch": 1.437632135306554, "percentage": 48.02, "elapsed_time": "0:25:00", "remaining_time": "0:27:04", "throughput": 1474.48, "total_tokens": 2212840} |
|
{"current_steps": 175, "total_steps": 354, "loss": 2.737, "learning_rate": 2.5443704478301154e-05, "epoch": 1.4799154334038054, "percentage": 49.44, "elapsed_time": "0:25:43", "remaining_time": "0:26:18", "throughput": 1476.76, "total_tokens": 2278824} |
|
{"current_steps": 180, "total_steps": 354, "loss": 2.6645, "learning_rate": 2.433448696405563e-05, "epoch": 1.522198731501057, "percentage": 50.85, "elapsed_time": "0:26:27", "remaining_time": "0:25:34", "throughput": 1473.77, "total_tokens": 2339768} |
|
{"current_steps": 185, "total_steps": 354, "loss": 2.7233, "learning_rate": 2.3226579592316538e-05, "epoch": 1.5644820295983086, "percentage": 52.26, "elapsed_time": "0:27:13", "remaining_time": "0:24:52", "throughput": 1473.95, "total_tokens": 2407976} |
|
{"current_steps": 190, "total_steps": 354, "loss": 2.7463, "learning_rate": 2.2122163412082927e-05, "epoch": 1.6067653276955602, "percentage": 53.67, "elapsed_time": "0:27:59", "remaining_time": "0:24:09", "throughput": 1470.22, "total_tokens": 2469176} |
|
{"current_steps": 195, "total_steps": 354, "loss": 2.8348, "learning_rate": 2.1023412599523204e-05, "epoch": 1.6490486257928119, "percentage": 55.08, "elapsed_time": "0:28:33", "remaining_time": "0:23:17", "throughput": 1475.48, "total_tokens": 2528152} |
|
{"current_steps": 200, "total_steps": 354, "loss": 2.7732, "learning_rate": 1.993249017784766e-05, "epoch": 1.6913319238900635, "percentage": 56.5, "elapsed_time": "0:29:16", "remaining_time": "0:22:32", "throughput": 1473.63, "total_tokens": 2588984} |
|
{"current_steps": 205, "total_steps": 354, "loss": 2.7693, "learning_rate": 1.8851543759137007e-05, "epoch": 1.733615221987315, "percentage": 57.91, "elapsed_time": "0:30:00", "remaining_time": "0:21:48", "throughput": 1475.66, "total_tokens": 2657016} |
|
{"current_steps": 210, "total_steps": 354, "loss": 2.6524, "learning_rate": 1.778270131650948e-05, "epoch": 1.7758985200845667, "percentage": 59.32, "elapsed_time": "0:30:44", "remaining_time": "0:21:04", "throughput": 1474.37, "total_tokens": 2719656} |
|
{"current_steps": 215, "total_steps": 354, "loss": 2.7338, "learning_rate": 1.672806699494966e-05, "epoch": 1.8181818181818183, "percentage": 60.73, "elapsed_time": "0:31:36", "remaining_time": "0:20:25", "throughput": 1469.49, "total_tokens": 2786168} |
|
{"current_steps": 220, "total_steps": 354, "loss": 2.7467, "learning_rate": 1.5689716969045848e-05, "epoch": 1.8604651162790697, "percentage": 62.15, "elapsed_time": "0:32:15", "remaining_time": "0:19:38", "throughput": 1472.13, "total_tokens": 2849400} |
|
{"current_steps": 225, "total_steps": 354, "loss": 2.5769, "learning_rate": 1.4669695355790552e-05, "epoch": 1.9027484143763214, "percentage": 63.56, "elapsed_time": "0:33:03", "remaining_time": "0:18:57", "throughput": 1470.76, "total_tokens": 2917000} |
|
{"current_steps": 230, "total_steps": 354, "loss": 2.784, "learning_rate": 1.3670010190490073e-05, "epoch": 1.945031712473573, "percentage": 64.97, "elapsed_time": "0:33:43", "remaining_time": "0:18:11", "throughput": 1472.9, "total_tokens": 2980712} |
|
{"current_steps": 235, "total_steps": 354, "loss": 2.7729, "learning_rate": 1.2692629473705453e-05, "epoch": 1.9873150105708244, "percentage": 66.38, "elapsed_time": "0:34:25", "remaining_time": "0:17:25", "throughput": 1474.59, "total_tokens": 3045848} |
|
{"current_steps": 240, "total_steps": 354, "loss": 2.8171, "learning_rate": 1.173947729700644e-05, "epoch": 2.029598308668076, "percentage": 67.8, "elapsed_time": "0:35:08", "remaining_time": "0:16:41", "throughput": 1472.68, "total_tokens": 3105128} |
|
{"current_steps": 245, "total_steps": 354, "loss": 2.5436, "learning_rate": 1.081243005516571e-05, "epoch": 2.0718816067653276, "percentage": 69.21, "elapsed_time": "0:35:54", "remaining_time": "0:15:58", "throughput": 1473.99, "total_tokens": 3175016} |
|
{"current_steps": 250, "total_steps": 354, "loss": 2.4681, "learning_rate": 9.913312752249903e-06, "epoch": 2.1141649048625792, "percentage": 70.62, "elapsed_time": "0:36:42", "remaining_time": "0:15:16", "throughput": 1473.62, "total_tokens": 3245608} |
|
{"current_steps": 255, "total_steps": 354, "loss": 2.4036, "learning_rate": 9.043895408879505e-06, "epoch": 2.156448202959831, "percentage": 72.03, "elapsed_time": "0:37:29", "remaining_time": "0:14:33", "throughput": 1470.6, "total_tokens": 3308040} |
|
{"current_steps": 260, "total_steps": 354, "loss": 2.4283, "learning_rate": 8.20588957773018e-06, "epoch": 2.1987315010570825, "percentage": 73.45, "elapsed_time": "0:38:16", "remaining_time": "0:13:50", "throughput": 1469.81, "total_tokens": 3376152} |
|
{"current_steps": 265, "total_steps": 354, "loss": 2.6161, "learning_rate": 7.400944974135427e-06, "epoch": 2.241014799154334, "percentage": 74.86, "elapsed_time": "0:39:02", "remaining_time": "0:13:06", "throughput": 1467.98, "total_tokens": 3438504} |
|
{"current_steps": 270, "total_steps": 354, "loss": 2.6151, "learning_rate": 6.6306462284233234e-06, "epoch": 2.2832980972515857, "percentage": 76.27, "elapsed_time": "0:39:49", "remaining_time": "0:12:23", "throughput": 1468.59, "total_tokens": 3508488} |
|
{"current_steps": 275, "total_steps": 354, "loss": 2.5051, "learning_rate": 5.896509766381028e-06, "epoch": 2.3255813953488373, "percentage": 77.68, "elapsed_time": "0:40:29", "remaining_time": "0:11:37", "throughput": 1470.23, "total_tokens": 3572248} |
|
{"current_steps": 280, "total_steps": 354, "loss": 2.535, "learning_rate": 5.199980823988157e-06, "epoch": 2.367864693446089, "percentage": 79.1, "elapsed_time": "0:41:05", "remaining_time": "0:10:51", "throughput": 1471.11, "total_tokens": 3627304} |
|
{"current_steps": 285, "total_steps": 354, "loss": 2.4052, "learning_rate": 4.542430602295774e-06, "epoch": 2.41014799154334, "percentage": 80.51, "elapsed_time": "0:41:55", "remaining_time": "0:10:08", "throughput": 1469.55, "total_tokens": 3696504} |
|
{"current_steps": 290, "total_steps": 354, "loss": 2.6136, "learning_rate": 3.925153568052123e-06, "epoch": 2.452431289640592, "percentage": 81.92, "elapsed_time": "0:42:33", "remaining_time": "0:09:23", "throughput": 1471.44, "total_tokens": 3757976} |
|
{"current_steps": 295, "total_steps": 354, "loss": 2.2962, "learning_rate": 3.3493649053890326e-06, "epoch": 2.4947145877378434, "percentage": 83.33, "elapsed_time": "0:43:27", "remaining_time": "0:08:41", "throughput": 1468.71, "total_tokens": 3829192} |
|
{"current_steps": 300, "total_steps": 354, "loss": 2.6799, "learning_rate": 2.8161981235857143e-06, "epoch": 2.536997885835095, "percentage": 84.75, "elapsed_time": "0:44:14", "remaining_time": "0:07:57", "throughput": 1469.55, "total_tokens": 3900648} |
|
{"current_steps": 305, "total_steps": 354, "loss": 2.3375, "learning_rate": 2.3267028256193036e-06, "epoch": 2.5792811839323466, "percentage": 86.16, "elapsed_time": "0:45:11", "remaining_time": "0:07:15", "throughput": 1464.58, "total_tokens": 3970936} |
|
{"current_steps": 310, "total_steps": 354, "loss": 2.5113, "learning_rate": 1.881842641895104e-06, "epoch": 2.6215644820295982, "percentage": 87.57, "elapsed_time": "0:45:49", "remaining_time": "0:06:30", "throughput": 1466.98, "total_tokens": 4033192} |
|
{"current_steps": 315, "total_steps": 354, "loss": 2.5114, "learning_rate": 1.4824933332241692e-06, "epoch": 2.66384778012685, "percentage": 88.98, "elapsed_time": "0:46:30", "remaining_time": "0:05:45", "throughput": 1467.47, "total_tokens": 4095144} |
|
{"current_steps": 320, "total_steps": 354, "loss": 2.5342, "learning_rate": 1.129441066782702e-06, "epoch": 2.7061310782241015, "percentage": 90.4, "elapsed_time": "0:47:09", "remaining_time": "0:05:00", "throughput": 1469.81, "total_tokens": 4159320} |
|
{"current_steps": 325, "total_steps": 354, "loss": 2.4773, "learning_rate": 8.233808684473959e-07, "epoch": 2.748414376321353, "percentage": 91.81, "elapsed_time": "0:47:54", "remaining_time": "0:04:16", "throughput": 1470.41, "total_tokens": 4227128} |
|
{"current_steps": 330, "total_steps": 354, "loss": 2.4826, "learning_rate": 5.649152545533332e-07, "epoch": 2.7906976744186047, "percentage": 93.22, "elapsed_time": "0:48:39", "remaining_time": "0:03:32", "throughput": 1470.88, "total_tokens": 4293848} |
|
{"current_steps": 335, "total_steps": 354, "loss": 2.5766, "learning_rate": 3.5455304576806347e-07, "epoch": 2.8329809725158563, "percentage": 94.63, "elapsed_time": "0:49:22", "remaining_time": "0:02:48", "throughput": 1471.21, "total_tokens": 4358088} |
|
{"current_steps": 340, "total_steps": 354, "loss": 2.5021, "learning_rate": 1.927083654168854e-07, "epoch": 2.875264270613108, "percentage": 96.05, "elapsed_time": "0:49:59", "remaining_time": "0:02:03", "throughput": 1473.98, "total_tokens": 4421432} |
|
{"current_steps": 345, "total_steps": 354, "loss": 2.6718, "learning_rate": 7.969982423124689e-08, "epoch": 2.9175475687103596, "percentage": 97.46, "elapsed_time": "0:50:35", "remaining_time": "0:01:19", "throughput": 1476.86, "total_tokens": 4482632} |
|
{"current_steps": 350, "total_steps": 354, "loss": 2.3902, "learning_rate": 1.5749893125160954e-08, "epoch": 2.9598308668076108, "percentage": 98.87, "elapsed_time": "0:51:20", "remaining_time": "0:00:35", "throughput": 1475.87, "total_tokens": 4547096} |
|
{"current_steps": 354, "total_steps": 354, "epoch": 2.9936575052854124, "percentage": 100.0, "elapsed_time": "0:51:54", "remaining_time": "0:00:00", "throughput": 1476.36, "total_tokens": 4597544} |