{
  "best_metric": 0.24188034188034188,
  "best_model_checkpoint": "/content/drive/MyDrive/model_ft_2/checkpoint-15000",
  "epoch": 17.647058823529413,
  "global_step": 15000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.12,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.3163,
      "step": 100
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.2851,
      "step": 200
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.5e-06,
      "loss": 0.2783,
      "step": 300
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.3097,
      "step": 400
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.5e-06,
      "loss": 0.286,
      "step": 500
    },
    {
      "epoch": 0.71,
      "learning_rate": 3e-06,
      "loss": 0.3008,
      "step": 600
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.5e-06,
      "loss": 0.2617,
      "step": 700
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.2949,
      "step": 800
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.5e-06,
      "loss": 0.2744,
      "step": 900
    },
    {
      "epoch": 1.18,
      "learning_rate": 5e-06,
      "loss": 0.2932,
      "step": 1000
    },
    {
      "epoch": 1.29,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.289,
      "step": 1100
    },
    {
      "epoch": 1.41,
      "learning_rate": 6e-06,
      "loss": 0.2984,
      "step": 1200
    },
    {
      "epoch": 1.53,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.3028,
      "step": 1300
    },
    {
      "epoch": 1.65,
      "learning_rate": 7e-06,
      "loss": 0.2607,
      "step": 1400
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.2906,
      "step": 1500
    },
    {
      "epoch": 1.88,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.2755,
      "step": 1600
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.5e-06,
      "loss": 0.2894,
      "step": 1700
    },
    {
      "epoch": 2.12,
      "learning_rate": 9e-06,
      "loss": 0.282,
      "step": 1800
    },
    {
      "epoch": 2.24,
      "learning_rate": 9.5e-06,
      "loss": 0.2715,
      "step": 1900
    },
    {
      "epoch": 2.35,
      "learning_rate": 1e-05,
      "loss": 0.2845,
      "step": 2000
    },
    {
      "epoch": 2.47,
      "learning_rate": 1e-05,
      "loss": 0.2698,
      "step": 2100
    },
    {
      "epoch": 2.59,
      "learning_rate": 1e-05,
      "loss": 0.2803,
      "step": 2200
    },
    {
      "epoch": 2.71,
      "learning_rate": 1e-05,
      "loss": 0.2812,
      "step": 2300
    },
    {
      "epoch": 2.82,
      "learning_rate": 1e-05,
      "loss": 0.2947,
      "step": 2400
    },
    {
      "epoch": 2.94,
      "learning_rate": 1e-05,
      "loss": 0.2529,
      "step": 2500
    },
    {
      "epoch": 3.06,
      "learning_rate": 1e-05,
      "loss": 0.298,
      "step": 2600
    },
    {
      "epoch": 3.18,
      "learning_rate": 1e-05,
      "loss": 0.2671,
      "step": 2700
    },
    {
      "epoch": 3.29,
      "learning_rate": 1e-05,
      "loss": 0.2619,
      "step": 2800
    },
    {
      "epoch": 3.41,
      "learning_rate": 1e-05,
      "loss": 0.2546,
      "step": 2900
    },
    {
      "epoch": 3.53,
      "learning_rate": 1e-05,
      "loss": 0.2776,
      "step": 3000
    },
    {
      "epoch": 3.65,
      "learning_rate": 1e-05,
      "loss": 0.2869,
      "step": 3100
    },
    {
      "epoch": 3.76,
      "learning_rate": 1e-05,
      "loss": 0.2664,
      "step": 3200
    },
    {
      "epoch": 3.88,
      "learning_rate": 1e-05,
      "loss": 0.2893,
      "step": 3300
    },
    {
      "epoch": 4.0,
      "learning_rate": 1e-05,
      "loss": 0.2691,
      "step": 3400
    },
    {
      "epoch": 4.12,
      "learning_rate": 1e-05,
      "loss": 0.2729,
      "step": 3500
    },
    {
      "epoch": 4.24,
      "learning_rate": 1e-05,
      "loss": 0.2606,
      "step": 3600
    },
    {
      "epoch": 4.35,
      "learning_rate": 1e-05,
      "loss": 0.2731,
      "step": 3700
    },
    {
      "epoch": 4.47,
      "learning_rate": 1e-05,
      "loss": 0.245,
      "step": 3800
    },
    {
      "epoch": 4.59,
      "learning_rate": 1e-05,
      "loss": 0.2644,
      "step": 3900
    },
    {
      "epoch": 4.71,
      "learning_rate": 1e-05,
      "loss": 0.2651,
      "step": 4000
    },
    {
      "epoch": 4.82,
      "learning_rate": 1e-05,
      "loss": 0.2437,
      "step": 4100
    },
    {
      "epoch": 4.94,
      "learning_rate": 1e-05,
      "loss": 0.2566,
      "step": 4200
    },
    {
      "epoch": 5.06,
      "learning_rate": 1e-05,
      "loss": 0.2582,
      "step": 4300
    },
    {
      "epoch": 5.18,
      "learning_rate": 1e-05,
      "loss": 0.2261,
      "step": 4400
    },
    {
      "epoch": 5.29,
      "learning_rate": 1e-05,
      "loss": 0.2685,
      "step": 4500
    },
    {
      "epoch": 5.41,
      "learning_rate": 1e-05,
      "loss": 0.2459,
      "step": 4600
    },
    {
      "epoch": 5.53,
      "learning_rate": 1e-05,
      "loss": 0.2536,
      "step": 4700
    },
    {
      "epoch": 5.65,
      "learning_rate": 1e-05,
      "loss": 0.2592,
      "step": 4800
    },
    {
      "epoch": 5.76,
      "learning_rate": 1e-05,
      "loss": 0.2491,
      "step": 4900
    },
    {
      "epoch": 5.88,
      "learning_rate": 1e-05,
      "loss": 0.2434,
      "step": 5000
    },
    {
      "epoch": 5.88,
      "eval_cer": 0.0821832842301132,
      "eval_loss": 0.575729250907898,
      "eval_runtime": 8.2591,
      "eval_samples_per_second": 36.324,
      "eval_steps_per_second": 18.162,
      "eval_wer": 0.25256410256410255,
      "step": 5000
    },
    {
      "epoch": 6.0,
      "learning_rate": 1e-05,
      "loss": 0.2547,
      "step": 5100
    },
    {
      "epoch": 6.12,
      "learning_rate": 1e-05,
      "loss": 0.2459,
      "step": 5200
    },
    {
      "epoch": 6.24,
      "learning_rate": 1e-05,
      "loss": 0.2566,
      "step": 5300
    },
    {
      "epoch": 6.35,
      "learning_rate": 1e-05,
      "loss": 0.2498,
      "step": 5400
    },
    {
      "epoch": 6.47,
      "learning_rate": 1e-05,
      "loss": 0.2492,
      "step": 5500
    },
    {
      "epoch": 6.59,
      "learning_rate": 1e-05,
      "loss": 0.2491,
      "step": 5600
    },
    {
      "epoch": 6.71,
      "learning_rate": 1e-05,
      "loss": 0.2519,
      "step": 5700
    },
    {
      "epoch": 6.82,
      "learning_rate": 1e-05,
      "loss": 0.2426,
      "step": 5800
    },
    {
      "epoch": 6.94,
      "learning_rate": 1e-05,
      "loss": 0.2175,
      "step": 5900
    },
    {
      "epoch": 7.06,
      "learning_rate": 1e-05,
      "loss": 0.2506,
      "step": 6000
    },
    {
      "epoch": 7.18,
      "learning_rate": 1e-05,
      "loss": 0.2342,
      "step": 6100
    },
    {
      "epoch": 7.29,
      "learning_rate": 1e-05,
      "loss": 0.2418,
      "step": 6200
    },
    {
      "epoch": 7.41,
      "learning_rate": 1e-05,
      "loss": 0.2345,
      "step": 6300
    },
    {
      "epoch": 7.53,
      "learning_rate": 1e-05,
      "loss": 0.2358,
      "step": 6400
    },
    {
      "epoch": 7.65,
      "learning_rate": 1e-05,
      "loss": 0.2367,
      "step": 6500
    },
    {
      "epoch": 7.76,
      "learning_rate": 1e-05,
      "loss": 0.2402,
      "step": 6600
    },
    {
      "epoch": 7.88,
      "learning_rate": 1e-05,
      "loss": 0.2448,
      "step": 6700
    },
    {
      "epoch": 8.0,
      "learning_rate": 1e-05,
      "loss": 0.2458,
      "step": 6800
    },
    {
      "epoch": 8.12,
      "learning_rate": 1e-05,
      "loss": 0.2408,
      "step": 6900
    },
    {
      "epoch": 8.24,
      "learning_rate": 1e-05,
      "loss": 0.2425,
      "step": 7000
    },
    {
      "epoch": 8.35,
      "learning_rate": 1e-05,
      "loss": 0.2302,
      "step": 7100
    },
    {
      "epoch": 8.47,
      "learning_rate": 1e-05,
      "loss": 0.2426,
      "step": 7200
    },
    {
      "epoch": 8.59,
      "learning_rate": 1e-05,
      "loss": 0.2101,
      "step": 7300
    },
    {
      "epoch": 8.71,
      "learning_rate": 1e-05,
      "loss": 0.2328,
      "step": 7400
    },
    {
      "epoch": 8.82,
      "learning_rate": 1e-05,
      "loss": 0.2457,
      "step": 7500
    },
    {
      "epoch": 8.94,
      "learning_rate": 1e-05,
      "loss": 0.2351,
      "step": 7600
    },
    {
      "epoch": 9.06,
      "learning_rate": 1e-05,
      "loss": 0.2483,
      "step": 7700
    },
    {
      "epoch": 9.18,
      "learning_rate": 1e-05,
      "loss": 0.2135,
      "step": 7800
    },
    {
      "epoch": 9.29,
      "learning_rate": 1e-05,
      "loss": 0.2227,
      "step": 7900
    },
    {
      "epoch": 9.41,
      "learning_rate": 1e-05,
      "loss": 0.2342,
      "step": 8000
    },
    {
      "epoch": 9.53,
      "learning_rate": 1e-05,
      "loss": 0.2534,
      "step": 8100
    },
    {
      "epoch": 9.65,
      "learning_rate": 1e-05,
      "loss": 0.2368,
      "step": 8200
    },
    {
      "epoch": 9.76,
      "learning_rate": 1e-05,
      "loss": 0.2531,
      "step": 8300
    },
    {
      "epoch": 9.88,
      "learning_rate": 1e-05,
      "loss": 0.2474,
      "step": 8400
    },
    {
      "epoch": 10.0,
      "learning_rate": 1e-05,
      "loss": 0.2049,
      "step": 8500
    },
    {
      "epoch": 10.12,
      "learning_rate": 1e-05,
      "loss": 0.231,
      "step": 8600
    },
    {
      "epoch": 10.24,
      "learning_rate": 1e-05,
      "loss": 0.2384,
      "step": 8700
    },
    {
      "epoch": 10.35,
      "learning_rate": 1e-05,
      "loss": 0.2363,
      "step": 8800
    },
    {
      "epoch": 10.47,
      "learning_rate": 1e-05,
      "loss": 0.2125,
      "step": 8900
    },
    {
      "epoch": 10.59,
      "learning_rate": 1e-05,
      "loss": 0.224,
      "step": 9000
    },
    {
      "epoch": 10.71,
      "learning_rate": 1e-05,
      "loss": 0.2301,
      "step": 9100
    },
    {
      "epoch": 10.82,
      "learning_rate": 1e-05,
      "loss": 0.2598,
      "step": 9200
    },
    {
      "epoch": 10.94,
      "learning_rate": 1e-05,
      "loss": 0.2121,
      "step": 9300
    },
    {
      "epoch": 11.06,
      "learning_rate": 1e-05,
      "loss": 0.2444,
      "step": 9400
    },
    {
      "epoch": 11.18,
      "learning_rate": 1e-05,
      "loss": 0.2409,
      "step": 9500
    },
    {
      "epoch": 11.29,
      "learning_rate": 1e-05,
      "loss": 0.2274,
      "step": 9600
    },
    {
      "epoch": 11.41,
      "learning_rate": 1e-05,
      "loss": 0.2246,
      "step": 9700
    },
    {
      "epoch": 11.53,
      "learning_rate": 1e-05,
      "loss": 0.2256,
      "step": 9800
    },
    {
      "epoch": 11.65,
      "learning_rate": 1e-05,
      "loss": 0.2244,
      "step": 9900
    },
    {
      "epoch": 11.76,
      "learning_rate": 1e-05,
      "loss": 0.2246,
      "step": 10000
    },
    {
      "epoch": 11.76,
      "eval_cer": 0.08288106683206699,
      "eval_loss": 0.6029604077339172,
      "eval_runtime": 8.1986,
      "eval_samples_per_second": 36.592,
      "eval_steps_per_second": 18.296,
      "eval_wer": 0.2551282051282051,
      "step": 10000
    },
    {
      "epoch": 11.88,
      "learning_rate": 9.903e-06,
      "loss": 0.2299,
      "step": 10100
    },
    {
      "epoch": 12.0,
      "learning_rate": 9.803e-06,
      "loss": 0.2131,
      "step": 10200
    },
    {
      "epoch": 12.12,
      "learning_rate": 9.703000000000002e-06,
      "loss": 0.2134,
      "step": 10300
    },
    {
      "epoch": 12.24,
      "learning_rate": 9.603000000000001e-06,
      "loss": 0.2388,
      "step": 10400
    },
    {
      "epoch": 12.35,
      "learning_rate": 9.503e-06,
      "loss": 0.2209,
      "step": 10500
    },
    {
      "epoch": 12.47,
      "learning_rate": 9.403000000000001e-06,
      "loss": 0.2347,
      "step": 10600
    },
    {
      "epoch": 12.59,
      "learning_rate": 9.303e-06,
      "loss": 0.2302,
      "step": 10700
    },
    {
      "epoch": 12.71,
      "learning_rate": 9.203000000000002e-06,
      "loss": 0.2174,
      "step": 10800
    },
    {
      "epoch": 12.82,
      "learning_rate": 9.103e-06,
      "loss": 0.2224,
      "step": 10900
    },
    {
      "epoch": 12.94,
      "learning_rate": 9.003e-06,
      "loss": 0.2269,
      "step": 11000
    },
    {
      "epoch": 13.06,
      "learning_rate": 8.903000000000001e-06,
      "loss": 0.2426,
      "step": 11100
    },
    {
      "epoch": 13.18,
      "learning_rate": 8.803e-06,
      "loss": 0.209,
      "step": 11200
    },
    {
      "epoch": 13.29,
      "learning_rate": 8.703e-06,
      "loss": 0.2103,
      "step": 11300
    },
    {
      "epoch": 13.41,
      "learning_rate": 8.603e-06,
      "loss": 0.228,
      "step": 11400
    },
    {
      "epoch": 13.53,
      "learning_rate": 8.503e-06,
      "loss": 0.2256,
      "step": 11500
    },
    {
      "epoch": 13.65,
      "learning_rate": 8.404000000000001e-06,
      "loss": 0.211,
      "step": 11600
    },
    {
      "epoch": 13.76,
      "learning_rate": 8.304e-06,
      "loss": 0.1967,
      "step": 11700
    },
    {
      "epoch": 13.88,
      "learning_rate": 8.204000000000001e-06,
      "loss": 0.2417,
      "step": 11800
    },
    {
      "epoch": 14.0,
      "learning_rate": 8.104e-06,
      "loss": 0.2268,
      "step": 11900
    },
    {
      "epoch": 14.12,
      "learning_rate": 8.004e-06,
      "loss": 0.227,
      "step": 12000
    },
    {
      "epoch": 14.24,
      "learning_rate": 7.904000000000001e-06,
      "loss": 0.2186,
      "step": 12100
    },
    {
      "epoch": 14.35,
      "learning_rate": 7.804e-06,
      "loss": 0.2125,
      "step": 12200
    },
    {
      "epoch": 14.47,
      "learning_rate": 7.704000000000001e-06,
      "loss": 0.2235,
      "step": 12300
    },
    {
      "epoch": 14.59,
      "learning_rate": 7.604e-06,
      "loss": 0.2089,
      "step": 12400
    },
    {
      "epoch": 14.71,
      "learning_rate": 7.5040000000000005e-06,
      "loss": 0.1967,
      "step": 12500
    },
    {
      "epoch": 14.82,
      "learning_rate": 7.404e-06,
      "loss": 0.2175,
      "step": 12600
    },
    {
      "epoch": 14.94,
      "learning_rate": 7.304000000000001e-06,
      "loss": 0.2157,
      "step": 12700
    },
    {
      "epoch": 15.06,
      "learning_rate": 7.204000000000001e-06,
      "loss": 0.1907,
      "step": 12800
    },
    {
      "epoch": 15.18,
      "learning_rate": 7.104000000000001e-06,
      "loss": 0.2072,
      "step": 12900
    },
    {
      "epoch": 15.29,
      "learning_rate": 7.004000000000001e-06,
      "loss": 0.2137,
      "step": 13000
    },
    {
      "epoch": 15.41,
      "learning_rate": 6.904e-06,
      "loss": 0.2137,
      "step": 13100
    },
    {
      "epoch": 15.53,
      "learning_rate": 6.804e-06,
      "loss": 0.2097,
      "step": 13200
    },
    {
      "epoch": 15.65,
      "learning_rate": 6.7040000000000005e-06,
      "loss": 0.2007,
      "step": 13300
    },
    {
      "epoch": 15.76,
      "learning_rate": 6.604000000000001e-06,
      "loss": 0.2183,
      "step": 13400
    },
    {
      "epoch": 15.88,
      "learning_rate": 6.504e-06,
      "loss": 0.1844,
      "step": 13500
    },
    {
      "epoch": 16.0,
      "learning_rate": 6.404e-06,
      "loss": 0.225,
      "step": 13600
    },
    {
      "epoch": 16.12,
      "learning_rate": 6.304e-06,
      "loss": 0.2141,
      "step": 13700
    },
    {
      "epoch": 16.24,
      "learning_rate": 6.204e-06,
      "loss": 0.2047,
      "step": 13800
    },
    {
      "epoch": 16.35,
      "learning_rate": 6.104000000000001e-06,
      "loss": 0.2019,
      "step": 13900
    },
    {
      "epoch": 16.47,
      "learning_rate": 6.004000000000001e-06,
      "loss": 0.1923,
      "step": 14000
    },
    {
      "epoch": 16.59,
      "learning_rate": 5.9040000000000006e-06,
      "loss": 0.2213,
      "step": 14100
    },
    {
      "epoch": 16.71,
      "learning_rate": 5.804000000000001e-06,
      "loss": 0.2035,
      "step": 14200
    },
    {
      "epoch": 16.82,
      "learning_rate": 5.704000000000001e-06,
      "loss": 0.2044,
      "step": 14300
    },
    {
      "epoch": 16.94,
      "learning_rate": 5.604000000000001e-06,
      "loss": 0.2055,
      "step": 14400
    },
    {
      "epoch": 17.06,
      "learning_rate": 5.505000000000001e-06,
      "loss": 0.2051,
      "step": 14500
    },
    {
      "epoch": 17.18,
      "learning_rate": 5.405e-06,
      "loss": 0.2114,
      "step": 14600
    },
    {
      "epoch": 17.29,
      "learning_rate": 5.305e-06,
      "loss": 0.2135,
      "step": 14700
    },
    {
      "epoch": 17.41,
      "learning_rate": 5.205e-06,
      "loss": 0.2079,
      "step": 14800
    },
    {
      "epoch": 17.53,
      "learning_rate": 5.105e-06,
      "loss": 0.183,
      "step": 14900
    },
    {
      "epoch": 17.65,
      "learning_rate": 5.0049999999999995e-06,
      "loss": 0.2098,
      "step": 15000
    },
    {
      "epoch": 17.65,
      "eval_cer": 0.07985734222360055,
      "eval_loss": 0.6368189454078674,
      "eval_runtime": 7.9866,
      "eval_samples_per_second": 37.563,
      "eval_steps_per_second": 18.781,
      "eval_wer": 0.24188034188034188,
      "step": 15000
    }
  ],
  "max_steps": 20000,
  "num_train_epochs": 24,
  "total_flos": 1.2884780636980976e+19,
  "trial_name": null,
  "trial_params": null
}