{
  "best_metric": 95.5,
  "best_model_checkpoint": "./medium_TH/checkpoint-3000",
  "epoch": 88.23529411764706,
  "global_step": 3000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.74,
      "learning_rate": 4.4e-07,
      "loss": 0.8659,
      "step": 25
    },
    {
      "epoch": 1.47,
      "learning_rate": 9.400000000000001e-07,
      "loss": 0.7368,
      "step": 50
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.44e-06,
      "loss": 0.5972,
      "step": 75
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.94e-06,
      "loss": 0.5492,
      "step": 100
    },
    {
      "epoch": 3.68,
      "learning_rate": 2.4400000000000004e-06,
      "loss": 0.4563,
      "step": 125
    },
    {
      "epoch": 4.41,
      "learning_rate": 2.9400000000000002e-06,
      "loss": 0.4365,
      "step": 150
    },
    {
      "epoch": 5.15,
      "learning_rate": 3.44e-06,
      "loss": 0.5196,
      "step": 175
    },
    {
      "epoch": 5.88,
      "learning_rate": 3.94e-06,
      "loss": 0.3682,
      "step": 200
    },
    {
      "epoch": 6.62,
      "learning_rate": 4.440000000000001e-06,
      "loss": 0.2693,
      "step": 225
    },
    {
      "epoch": 7.35,
      "learning_rate": 4.94e-06,
      "loss": 0.2433,
      "step": 250
    },
    {
      "epoch": 8.09,
      "learning_rate": 5.4400000000000004e-06,
      "loss": 0.2578,
      "step": 275
    },
    {
      "epoch": 8.82,
      "learning_rate": 5.94e-06,
      "loss": 0.173,
      "step": 300
    },
    {
      "epoch": 9.56,
      "learning_rate": 6.440000000000001e-06,
      "loss": 0.1173,
      "step": 325
    },
    {
      "epoch": 10.29,
      "learning_rate": 6.9400000000000005e-06,
      "loss": 0.1491,
      "step": 350
    },
    {
      "epoch": 11.03,
      "learning_rate": 7.440000000000001e-06,
      "loss": 0.1102,
      "step": 375
    },
    {
      "epoch": 11.76,
      "learning_rate": 7.94e-06,
      "loss": 0.1325,
      "step": 400
    },
    {
      "epoch": 12.5,
      "learning_rate": 8.44e-06,
      "loss": 0.1195,
      "step": 425
    },
    {
      "epoch": 13.24,
      "learning_rate": 8.94e-06,
      "loss": 0.0834,
      "step": 450
    },
    {
      "epoch": 13.97,
      "learning_rate": 9.440000000000001e-06,
      "loss": 0.1089,
      "step": 475
    },
    {
      "epoch": 14.71,
      "learning_rate": 9.940000000000001e-06,
      "loss": 0.0594,
      "step": 500
    },
    {
      "epoch": 15.44,
      "learning_rate": 9.937142857142858e-06,
      "loss": 0.0973,
      "step": 525
    },
    {
      "epoch": 16.18,
      "learning_rate": 9.865714285714285e-06,
      "loss": 0.0344,
      "step": 550
    },
    {
      "epoch": 16.91,
      "learning_rate": 9.794285714285714e-06,
      "loss": 0.0628,
      "step": 575
    },
    {
      "epoch": 17.65,
      "learning_rate": 9.722857142857143e-06,
      "loss": 0.0611,
      "step": 600
    },
    {
      "epoch": 18.38,
      "learning_rate": 9.651428571428572e-06,
      "loss": 0.054,
      "step": 625
    },
    {
      "epoch": 19.12,
      "learning_rate": 9.58e-06,
      "loss": 0.069,
      "step": 650
    },
    {
      "epoch": 19.85,
      "learning_rate": 9.508571428571429e-06,
      "loss": 0.0491,
      "step": 675
    },
    {
      "epoch": 20.59,
      "learning_rate": 9.437142857142858e-06,
      "loss": 0.0958,
      "step": 700
    },
    {
      "epoch": 21.32,
      "learning_rate": 9.365714285714287e-06,
      "loss": 0.0477,
      "step": 725
    },
    {
      "epoch": 22.06,
      "learning_rate": 9.294285714285714e-06,
      "loss": 0.0318,
      "step": 750
    },
    {
      "epoch": 22.79,
      "learning_rate": 9.222857142857143e-06,
      "loss": 0.0691,
      "step": 775
    },
    {
      "epoch": 23.53,
      "learning_rate": 9.151428571428572e-06,
      "loss": 0.0317,
      "step": 800
    },
    {
      "epoch": 24.26,
      "learning_rate": 9.080000000000001e-06,
      "loss": 0.0221,
      "step": 825
    },
    {
      "epoch": 25.0,
      "learning_rate": 9.00857142857143e-06,
      "loss": 0.039,
      "step": 850
    },
    {
      "epoch": 25.74,
      "learning_rate": 8.937142857142857e-06,
      "loss": 0.0344,
      "step": 875
    },
    {
      "epoch": 26.47,
      "learning_rate": 8.865714285714287e-06,
      "loss": 0.0168,
      "step": 900
    },
    {
      "epoch": 27.21,
      "learning_rate": 8.794285714285716e-06,
      "loss": 0.0346,
      "step": 925
    },
    {
      "epoch": 27.94,
      "learning_rate": 8.722857142857145e-06,
      "loss": 0.0437,
      "step": 950
    },
    {
      "epoch": 28.68,
      "learning_rate": 8.651428571428572e-06,
      "loss": 0.0197,
      "step": 975
    },
    {
      "epoch": 29.41,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0303,
      "step": 1000
    },
    {
      "epoch": 29.41,
      "eval_loss": 0.3286643922328949,
      "eval_runtime": 10.3915,
      "eval_samples_per_second": 2.213,
      "eval_steps_per_second": 0.289,
      "eval_wer": 106.0,
      "step": 1000
    },
    {
      "epoch": 30.15,
      "learning_rate": 8.50857142857143e-06,
      "loss": 0.0517,
      "step": 1025
    },
    {
      "epoch": 30.88,
      "learning_rate": 8.437142857142859e-06,
      "loss": 0.0381,
      "step": 1050
    },
    {
      "epoch": 31.62,
      "learning_rate": 8.365714285714286e-06,
      "loss": 0.0309,
      "step": 1075
    },
    {
      "epoch": 32.35,
      "learning_rate": 8.294285714285715e-06,
      "loss": 0.0078,
      "step": 1100
    },
    {
      "epoch": 33.09,
      "learning_rate": 8.222857142857144e-06,
      "loss": 0.0297,
      "step": 1125
    },
    {
      "epoch": 33.82,
      "learning_rate": 8.151428571428572e-06,
      "loss": 0.0247,
      "step": 1150
    },
    {
      "epoch": 34.56,
      "learning_rate": 8.08e-06,
      "loss": 0.0098,
      "step": 1175
    },
    {
      "epoch": 35.29,
      "learning_rate": 8.00857142857143e-06,
      "loss": 0.0083,
      "step": 1200
    },
    {
      "epoch": 36.03,
      "learning_rate": 7.937142857142857e-06,
      "loss": 0.0296,
      "step": 1225
    },
    {
      "epoch": 36.76,
      "learning_rate": 7.865714285714286e-06,
      "loss": 0.021,
      "step": 1250
    },
    {
      "epoch": 37.5,
      "learning_rate": 7.794285714285715e-06,
      "loss": 0.0403,
      "step": 1275
    },
    {
      "epoch": 38.24,
      "learning_rate": 7.722857142857142e-06,
      "loss": 0.0225,
      "step": 1300
    },
    {
      "epoch": 38.97,
      "learning_rate": 7.651428571428571e-06,
      "loss": 0.0295,
      "step": 1325
    },
    {
      "epoch": 39.71,
      "learning_rate": 7.58e-06,
      "loss": 0.0144,
      "step": 1350
    },
    {
      "epoch": 40.44,
      "learning_rate": 7.508571428571429e-06,
      "loss": 0.0271,
      "step": 1375
    },
    {
      "epoch": 41.18,
      "learning_rate": 7.4371428571428575e-06,
      "loss": 0.0225,
      "step": 1400
    },
    {
      "epoch": 41.91,
      "learning_rate": 7.365714285714286e-06,
      "loss": 0.0242,
      "step": 1425
    },
    {
      "epoch": 42.65,
      "learning_rate": 7.294285714285715e-06,
      "loss": 0.0138,
      "step": 1450
    },
    {
      "epoch": 43.38,
      "learning_rate": 7.222857142857144e-06,
      "loss": 0.0106,
      "step": 1475
    },
    {
      "epoch": 44.12,
      "learning_rate": 7.151428571428573e-06,
      "loss": 0.0221,
      "step": 1500
    },
    {
      "epoch": 44.85,
      "learning_rate": 7.08e-06,
      "loss": 0.0304,
      "step": 1525
    },
    {
      "epoch": 45.59,
      "learning_rate": 7.008571428571429e-06,
      "loss": 0.0224,
      "step": 1550
    },
    {
      "epoch": 46.32,
      "learning_rate": 6.937142857142858e-06,
      "loss": 0.0208,
      "step": 1575
    },
    {
      "epoch": 47.06,
      "learning_rate": 6.865714285714287e-06,
      "loss": 0.0057,
      "step": 1600
    },
    {
      "epoch": 47.79,
      "learning_rate": 6.794285714285714e-06,
      "loss": 0.0181,
      "step": 1625
    },
    {
      "epoch": 48.53,
      "learning_rate": 6.722857142857143e-06,
      "loss": 0.0118,
      "step": 1650
    },
    {
      "epoch": 49.26,
      "learning_rate": 6.651428571428572e-06,
      "loss": 0.0158,
      "step": 1675
    },
    {
      "epoch": 50.0,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.015,
      "step": 1700
    },
    {
      "epoch": 50.74,
      "learning_rate": 6.5085714285714295e-06,
      "loss": 0.0095,
      "step": 1725
    },
    {
      "epoch": 51.47,
      "learning_rate": 6.437142857142858e-06,
      "loss": 0.015,
      "step": 1750
    },
    {
      "epoch": 52.21,
      "learning_rate": 6.365714285714286e-06,
      "loss": 0.0099,
      "step": 1775
    },
    {
      "epoch": 52.94,
      "learning_rate": 6.294285714285715e-06,
      "loss": 0.0111,
      "step": 1800
    },
    {
      "epoch": 53.68,
      "learning_rate": 6.222857142857144e-06,
      "loss": 0.0165,
      "step": 1825
    },
    {
      "epoch": 54.41,
      "learning_rate": 6.151428571428571e-06,
      "loss": 0.0097,
      "step": 1850
    },
    {
      "epoch": 55.15,
      "learning_rate": 6.08e-06,
      "loss": 0.0042,
      "step": 1875
    },
    {
      "epoch": 55.88,
      "learning_rate": 6.008571428571429e-06,
      "loss": 0.0222,
      "step": 1900
    },
    {
      "epoch": 56.62,
      "learning_rate": 5.937142857142858e-06,
      "loss": 0.009,
      "step": 1925
    },
    {
      "epoch": 57.35,
      "learning_rate": 5.865714285714286e-06,
      "loss": 0.0052,
      "step": 1950
    },
    {
      "epoch": 58.09,
      "learning_rate": 5.794285714285715e-06,
      "loss": 0.0126,
      "step": 1975
    },
    {
      "epoch": 58.82,
      "learning_rate": 5.722857142857144e-06,
      "loss": 0.0142,
      "step": 2000
    },
    {
      "epoch": 58.82,
      "eval_loss": 0.3790155053138733,
      "eval_runtime": 10.2927,
      "eval_samples_per_second": 2.235,
      "eval_steps_per_second": 0.291,
      "eval_wer": 99.5,
      "step": 2000
    },
    {
      "epoch": 59.56,
      "learning_rate": 5.651428571428572e-06,
      "loss": 0.0117,
      "step": 2025
    },
    {
      "epoch": 60.29,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0079,
      "step": 2050
    },
    {
      "epoch": 61.03,
      "learning_rate": 5.508571428571429e-06,
      "loss": 0.0044,
      "step": 2075
    },
    {
      "epoch": 61.76,
      "learning_rate": 5.437142857142857e-06,
      "loss": 0.007,
      "step": 2100
    },
    {
      "epoch": 62.5,
      "learning_rate": 5.365714285714286e-06,
      "loss": 0.021,
      "step": 2125
    },
    {
      "epoch": 63.24,
      "learning_rate": 5.294285714285715e-06,
      "loss": 0.0102,
      "step": 2150
    },
    {
      "epoch": 63.97,
      "learning_rate": 5.2228571428571425e-06,
      "loss": 0.0035,
      "step": 2175
    },
    {
      "epoch": 64.71,
      "learning_rate": 5.1514285714285715e-06,
      "loss": 0.0072,
      "step": 2200
    },
    {
      "epoch": 65.44,
      "learning_rate": 5.0800000000000005e-06,
      "loss": 0.0032,
      "step": 2225
    },
    {
      "epoch": 66.18,
      "learning_rate": 5.0085714285714295e-06,
      "loss": 0.0047,
      "step": 2250
    },
    {
      "epoch": 66.91,
      "learning_rate": 4.937142857142858e-06,
      "loss": 0.003,
      "step": 2275
    },
    {
      "epoch": 67.65,
      "learning_rate": 4.865714285714287e-06,
      "loss": 0.0114,
      "step": 2300
    },
    {
      "epoch": 68.38,
      "learning_rate": 4.794285714285715e-06,
      "loss": 0.0073,
      "step": 2325
    },
    {
      "epoch": 69.12,
      "learning_rate": 4.722857142857144e-06,
      "loss": 0.0119,
      "step": 2350
    },
    {
      "epoch": 69.85,
      "learning_rate": 4.651428571428572e-06,
      "loss": 0.0045,
      "step": 2375
    },
    {
      "epoch": 70.59,
      "learning_rate": 4.58e-06,
      "loss": 0.0147,
      "step": 2400
    },
    {
      "epoch": 71.32,
      "learning_rate": 4.508571428571429e-06,
      "loss": 0.0081,
      "step": 2425
    },
    {
      "epoch": 72.06,
      "learning_rate": 4.437142857142857e-06,
      "loss": 0.0159,
      "step": 2450
    },
    {
      "epoch": 72.79,
      "learning_rate": 4.3657142857142855e-06,
      "loss": 0.0032,
      "step": 2475
    },
    {
      "epoch": 73.53,
      "learning_rate": 4.2942857142857146e-06,
      "loss": 0.0081,
      "step": 2500
    },
    {
      "epoch": 74.26,
      "learning_rate": 4.222857142857143e-06,
      "loss": 0.0116,
      "step": 2525
    },
    {
      "epoch": 75.0,
      "learning_rate": 4.151428571428572e-06,
      "loss": 0.0012,
      "step": 2550
    },
    {
      "epoch": 75.74,
      "learning_rate": 4.08e-06,
      "loss": 0.0018,
      "step": 2575
    },
    {
      "epoch": 76.47,
      "learning_rate": 4.008571428571429e-06,
      "loss": 0.0059,
      "step": 2600
    },
    {
      "epoch": 77.21,
      "learning_rate": 3.937142857142858e-06,
      "loss": 0.0032,
      "step": 2625
    },
    {
      "epoch": 77.94,
      "learning_rate": 3.865714285714286e-06,
      "loss": 0.0089,
      "step": 2650
    },
    {
      "epoch": 78.68,
      "learning_rate": 3.7942857142857147e-06,
      "loss": 0.0044,
      "step": 2675
    },
    {
      "epoch": 79.41,
      "learning_rate": 3.722857142857143e-06,
      "loss": 0.0049,
      "step": 2700
    },
    {
      "epoch": 80.15,
      "learning_rate": 3.651428571428572e-06,
      "loss": 0.0038,
      "step": 2725
    },
    {
      "epoch": 80.88,
      "learning_rate": 3.58e-06,
      "loss": 0.0053,
      "step": 2750
    },
    {
      "epoch": 81.62,
      "learning_rate": 3.508571428571429e-06,
      "loss": 0.004,
      "step": 2775
    },
    {
      "epoch": 82.35,
      "learning_rate": 3.437142857142857e-06,
      "loss": 0.0495,
      "step": 2800
    },
    {
      "epoch": 83.09,
      "learning_rate": 3.3657142857142862e-06,
      "loss": 0.0035,
      "step": 2825
    },
    {
      "epoch": 83.82,
      "learning_rate": 3.2942857142857144e-06,
      "loss": 0.0044,
      "step": 2850
    },
    {
      "epoch": 84.56,
      "learning_rate": 3.222857142857143e-06,
      "loss": 0.005,
      "step": 2875
    },
    {
      "epoch": 85.29,
      "learning_rate": 3.151428571428572e-06,
      "loss": 0.0102,
      "step": 2900
    },
    {
      "epoch": 86.03,
      "learning_rate": 3.08e-06,
      "loss": 0.0054,
      "step": 2925
    },
    {
      "epoch": 86.76,
      "learning_rate": 3.008571428571429e-06,
      "loss": 0.0095,
      "step": 2950
    },
    {
      "epoch": 87.5,
      "learning_rate": 2.9371428571428573e-06,
      "loss": 0.0065,
      "step": 2975
    },
    {
      "epoch": 88.24,
      "learning_rate": 2.865714285714286e-06,
      "loss": 0.0032,
      "step": 3000
    },
    {
      "epoch": 88.24,
      "eval_loss": 0.3970164954662323,
      "eval_runtime": 10.2993,
      "eval_samples_per_second": 2.233,
      "eval_steps_per_second": 0.291,
      "eval_wer": 95.5,
      "step": 3000
    }
  ],
  "max_steps": 4000,
  "num_train_epochs": 118,
  "total_flos": 1.801163540791296e+19,
  "trial_name": null,
  "trial_params": null
}