{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 18.383683523594183,
  "global_step": 250000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 1e-05,
      "loss": 8.2902,
      "step": 500
    },
    {
      "epoch": 0.07,
      "learning_rate": 2e-05,
      "loss": 6.7786,
      "step": 1000
    },
    {
      "epoch": 0.11,
      "learning_rate": 3e-05,
      "loss": 6.1284,
      "step": 1500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4e-05,
      "loss": 5.5746,
      "step": 2000
    },
    {
      "epoch": 0.18,
      "learning_rate": 5e-05,
      "loss": 5.1965,
      "step": 2500
    },
    {
      "epoch": 0.22,
      "learning_rate": 6e-05,
      "loss": 4.8986,
      "step": 3000
    },
    {
      "epoch": 0.26,
      "learning_rate": 7.000000000000001e-05,
      "loss": 4.6526,
      "step": 3500
    },
    {
      "epoch": 0.29,
      "learning_rate": 8e-05,
      "loss": 4.4549,
      "step": 4000
    },
    {
      "epoch": 0.33,
      "learning_rate": 8.999999999999999e-05,
      "loss": 4.2966,
      "step": 4500
    },
    {
      "epoch": 0.37,
      "learning_rate": 0.0001,
      "loss": 4.1677,
      "step": 5000
    },
    {
      "epoch": 0.4,
      "learning_rate": 0.00011,
      "loss": 4.0627,
      "step": 5500
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.00012,
      "loss": 3.9741,
      "step": 6000
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00013000000000000002,
      "loss": 3.9031,
      "step": 6500
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.00014000000000000001,
      "loss": 3.8361,
      "step": 7000
    },
    {
      "epoch": 0.55,
      "learning_rate": 0.00015,
      "loss": 3.7855,
      "step": 7500
    },
    {
      "epoch": 0.59,
      "learning_rate": 0.00016,
      "loss": 3.7356,
      "step": 8000
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00017,
      "loss": 3.6932,
      "step": 8500
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00017999999999999998,
      "loss": 3.6513,
      "step": 9000
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00019,
      "loss": 3.624,
      "step": 9500
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.0002,
      "loss": 3.5894,
      "step": 10000
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00021,
      "loss": 3.5661,
      "step": 10500
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00022,
      "loss": 3.5401,
      "step": 11000
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00023,
      "loss": 3.5177,
      "step": 11500
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00024,
      "loss": 3.4938,
      "step": 12000
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00025,
      "loss": 3.4777,
      "step": 12500
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.00026000000000000003,
      "loss": 3.4583,
      "step": 13000
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00027,
      "loss": 3.4419,
      "step": 13500
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.00028000000000000003,
      "loss": 3.429,
      "step": 14000
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00029,
      "loss": 3.4086,
      "step": 14500
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0003,
      "loss": 3.3968,
      "step": 15000
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00031,
      "loss": 3.3852,
      "step": 15500
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00032,
      "loss": 3.374,
      "step": 16000
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.00033,
      "loss": 3.3627,
      "step": 16500
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.00034,
      "loss": 3.3528,
      "step": 17000
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.00035,
      "loss": 3.3456,
      "step": 17500
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.00035999999999999997,
      "loss": 3.3382,
      "step": 18000
    },
    {
      "epoch": 1.36,
      "learning_rate": 0.00037,
      "loss": 3.3255,
      "step": 18500
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.00038,
      "loss": 3.321,
      "step": 19000
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.00039000000000000005,
      "loss": 3.3121,
      "step": 19500
    },
    {
      "epoch": 1.47,
      "learning_rate": 0.0004,
      "loss": 3.3031,
      "step": 20000
    },
    {
      "epoch": 1.51,
      "learning_rate": 0.00041,
      "loss": 3.2987,
      "step": 20500
    },
    {
      "epoch": 1.54,
      "learning_rate": 0.00042,
      "loss": 3.2923,
      "step": 21000
    },
    {
      "epoch": 1.58,
      "learning_rate": 0.00043,
      "loss": 3.285,
      "step": 21500
    },
    {
      "epoch": 1.62,
      "learning_rate": 0.00044,
      "loss": 3.2817,
      "step": 22000
    },
    {
      "epoch": 1.65,
      "learning_rate": 0.00045000000000000004,
      "loss": 3.2749,
      "step": 22500
    },
    {
      "epoch": 1.69,
      "learning_rate": 0.00046,
      "loss": 3.2674,
      "step": 23000
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.00047,
      "loss": 3.2678,
      "step": 23500
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.00048,
      "loss": 3.259,
      "step": 24000
    },
    {
      "epoch": 1.8,
      "learning_rate": 0.00049,
      "loss": 3.2521,
      "step": 24500
    },
    {
      "epoch": 1.84,
      "learning_rate": 0.0005,
      "loss": 3.2511,
      "step": 25000
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.0004988888888888889,
      "loss": 3.2451,
      "step": 25500
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.0004977777777777778,
      "loss": 3.2395,
      "step": 26000
    },
    {
      "epoch": 1.95,
      "learning_rate": 0.0004966666666666666,
      "loss": 3.236,
      "step": 26500
    },
    {
      "epoch": 1.99,
      "learning_rate": 0.0004955555555555556,
      "loss": 3.2274,
      "step": 27000
    },
    {
      "epoch": 2.02,
      "learning_rate": 0.0004944444444444445,
      "loss": 3.2235,
      "step": 27500
    },
    {
      "epoch": 2.06,
      "learning_rate": 0.0004933333333333334,
      "loss": 3.2109,
      "step": 28000
    },
    {
      "epoch": 2.1,
      "learning_rate": 0.0004922222222222222,
      "loss": 3.2111,
      "step": 28500
    },
    {
      "epoch": 2.13,
      "learning_rate": 0.0004911111111111111,
      "loss": 3.2061,
      "step": 29000
    },
    {
      "epoch": 2.17,
      "learning_rate": 0.00049,
      "loss": 3.2011,
      "step": 29500
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.0004888888888888889,
      "loss": 3.1994,
      "step": 30000
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.0004877777777777778,
      "loss": 3.1935,
      "step": 30500
    },
    {
      "epoch": 2.28,
      "learning_rate": 0.0004866666666666667,
      "loss": 3.1946,
      "step": 31000
    },
    {
      "epoch": 2.32,
      "learning_rate": 0.0004855555555555556,
      "loss": 3.1919,
      "step": 31500
    },
    {
      "epoch": 2.35,
      "learning_rate": 0.00048444444444444446,
      "loss": 3.1861,
      "step": 32000
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.00048333333333333334,
      "loss": 3.184,
      "step": 32500
    },
    {
      "epoch": 2.43,
      "learning_rate": 0.0004822222222222222,
      "loss": 3.1815,
      "step": 33000
    },
    {
      "epoch": 2.46,
      "learning_rate": 0.0004811111111111111,
      "loss": 3.1788,
      "step": 33500
    },
    {
      "epoch": 2.5,
      "learning_rate": 0.00048,
      "loss": 3.1751,
      "step": 34000
    },
    {
      "epoch": 2.54,
      "learning_rate": 0.0004788888888888889,
      "loss": 3.1721,
      "step": 34500
    },
    {
      "epoch": 2.57,
      "learning_rate": 0.0004777777777777778,
      "loss": 3.1705,
      "step": 35000
    },
    {
      "epoch": 2.61,
      "learning_rate": 0.0004766666666666667,
      "loss": 3.1705,
      "step": 35500
    },
    {
      "epoch": 2.65,
      "learning_rate": 0.00047555555555555556,
      "loss": 3.164,
      "step": 36000
    },
    {
      "epoch": 2.68,
      "learning_rate": 0.00047444444444444444,
      "loss": 3.1619,
      "step": 36500
    },
    {
      "epoch": 2.72,
      "learning_rate": 0.00047333333333333336,
      "loss": 3.1599,
      "step": 37000
    },
    {
      "epoch": 2.76,
      "learning_rate": 0.00047222222222222224,
      "loss": 3.1587,
      "step": 37500
    },
    {
      "epoch": 2.79,
      "learning_rate": 0.0004711111111111111,
      "loss": 3.1566,
      "step": 38000
    },
    {
      "epoch": 2.83,
      "learning_rate": 0.00047,
      "loss": 3.1544,
      "step": 38500
    },
    {
      "epoch": 2.87,
      "learning_rate": 0.0004688888888888889,
      "loss": 3.1517,
      "step": 39000
    },
    {
      "epoch": 2.9,
      "learning_rate": 0.0004677777777777778,
      "loss": 3.1504,
      "step": 39500
    },
    {
      "epoch": 2.94,
      "learning_rate": 0.00046666666666666666,
      "loss": 3.1483,
      "step": 40000
    },
    {
      "epoch": 2.98,
      "learning_rate": 0.0004655555555555556,
      "loss": 3.1428,
      "step": 40500
    },
    {
      "epoch": 3.01,
      "learning_rate": 0.00046444444444444446,
      "loss": 3.1435,
      "step": 41000
    },
    {
      "epoch": 3.05,
      "learning_rate": 0.00046333333333333334,
      "loss": 3.1318,
      "step": 41500
    },
    {
      "epoch": 3.09,
      "learning_rate": 0.0004622222222222222,
      "loss": 3.1295,
      "step": 42000
    },
    {
      "epoch": 3.13,
      "learning_rate": 0.00046111111111111114,
      "loss": 3.1314,
      "step": 42500
    },
    {
      "epoch": 3.16,
      "learning_rate": 0.00046,
      "loss": 3.1332,
      "step": 43000
    },
    {
      "epoch": 3.2,
      "learning_rate": 0.0004588888888888889,
      "loss": 3.1262,
      "step": 43500
    },
    {
      "epoch": 3.24,
      "learning_rate": 0.0004577777777777778,
      "loss": 3.1272,
      "step": 44000
    },
    {
      "epoch": 3.27,
      "learning_rate": 0.0004566666666666667,
      "loss": 3.1304,
      "step": 44500
    },
    {
      "epoch": 3.31,
      "learning_rate": 0.00045555555555555556,
      "loss": 3.1252,
      "step": 45000
    },
    {
      "epoch": 3.35,
      "learning_rate": 0.00045444444444444444,
      "loss": 3.1238,
      "step": 45500
    },
    {
      "epoch": 3.38,
      "learning_rate": 0.0004533333333333333,
      "loss": 3.121,
      "step": 46000
    },
    {
      "epoch": 3.42,
      "learning_rate": 0.00045222222222222224,
      "loss": 3.1232,
      "step": 46500
    },
    {
      "epoch": 3.46,
      "learning_rate": 0.0004511111111111111,
      "loss": 3.1203,
      "step": 47000
    },
    {
      "epoch": 3.49,
      "learning_rate": 0.00045000000000000004,
      "loss": 3.1205,
      "step": 47500
    },
    {
      "epoch": 3.53,
      "learning_rate": 0.0004488888888888889,
      "loss": 3.1182,
      "step": 48000
    },
    {
      "epoch": 3.57,
      "learning_rate": 0.0004477777777777778,
      "loss": 3.1159,
      "step": 48500
    },
    {
      "epoch": 3.6,
      "learning_rate": 0.00044666666666666666,
      "loss": 3.112,
      "step": 49000
    },
    {
      "epoch": 3.64,
      "learning_rate": 0.00044555555555555554,
      "loss": 3.115,
      "step": 49500
    },
    {
      "epoch": 3.68,
      "learning_rate": 0.0004444444444444444,
      "loss": 3.1125,
      "step": 50000
    },
    {
      "epoch": 3.71,
      "learning_rate": 0.00044333333333333334,
      "loss": 3.1117,
      "step": 50500
    },
    {
      "epoch": 3.75,
      "learning_rate": 0.00044222222222222227,
      "loss": 3.1099,
      "step": 51000
    },
    {
      "epoch": 3.79,
      "learning_rate": 0.00044111111111111114,
      "loss": 3.1099,
      "step": 51500
    },
    {
      "epoch": 3.82,
      "learning_rate": 0.00044,
      "loss": 3.1062,
      "step": 52000
    },
    {
      "epoch": 3.86,
      "learning_rate": 0.0004388888888888889,
      "loss": 3.1058,
      "step": 52500
    },
    {
      "epoch": 3.9,
      "learning_rate": 0.00043777777777777776,
      "loss": 3.1078,
      "step": 53000
    },
    {
      "epoch": 3.93,
      "learning_rate": 0.00043666666666666664,
      "loss": 3.1045,
      "step": 53500
    },
    {
      "epoch": 3.97,
      "learning_rate": 0.0004355555555555555,
      "loss": 3.1042,
      "step": 54000
    },
    {
      "epoch": 4.01,
      "learning_rate": 0.0004344444444444445,
      "loss": 3.1081,
      "step": 54500
    },
    {
      "epoch": 4.04,
      "learning_rate": 0.00043333333333333337,
      "loss": 3.093,
      "step": 55000
    },
    {
      "epoch": 4.08,
      "learning_rate": 0.00043222222222222224,
      "loss": 3.0919,
      "step": 55500
    },
    {
      "epoch": 4.12,
      "learning_rate": 0.0004311111111111111,
      "loss": 3.0935,
      "step": 56000
    },
    {
      "epoch": 4.15,
      "learning_rate": 0.00043,
      "loss": 3.0929,
      "step": 56500
    },
    {
      "epoch": 4.19,
      "learning_rate": 0.00042888888888888886,
      "loss": 3.092,
      "step": 57000
    },
    {
      "epoch": 4.23,
      "learning_rate": 0.0004277777777777778,
      "loss": 3.0893,
      "step": 57500
    },
    {
      "epoch": 4.27,
      "learning_rate": 0.0004266666666666667,
      "loss": 3.0916,
      "step": 58000
    },
    {
      "epoch": 4.3,
      "learning_rate": 0.0004255555555555556,
      "loss": 3.0878,
      "step": 58500
    },
    {
      "epoch": 4.34,
      "learning_rate": 0.00042444444444444447,
      "loss": 3.0885,
      "step": 59000
    },
    {
      "epoch": 4.38,
      "learning_rate": 0.00042333333333333334,
      "loss": 3.0894,
      "step": 59500
    },
    {
      "epoch": 4.41,
      "learning_rate": 0.0004222222222222222,
      "loss": 3.089,
      "step": 60000
    },
    {
      "epoch": 4.45,
      "learning_rate": 0.0004211111111111111,
      "loss": 3.086,
      "step": 60500
    },
    {
      "epoch": 4.49,
      "learning_rate": 0.00042,
      "loss": 3.0848,
      "step": 61000
    },
    {
      "epoch": 4.52,
      "learning_rate": 0.0004188888888888889,
      "loss": 3.0845,
      "step": 61500
    },
    {
      "epoch": 4.56,
      "learning_rate": 0.0004177777777777778,
      "loss": 3.0864,
      "step": 62000
    },
    {
      "epoch": 4.6,
      "learning_rate": 0.0004166666666666667,
      "loss": 3.0857,
      "step": 62500
    },
    {
      "epoch": 4.63,
      "learning_rate": 0.00041555555555555557,
      "loss": 3.0828,
      "step": 63000
    },
    {
      "epoch": 4.67,
      "learning_rate": 0.00041444444444444444,
      "loss": 3.0802,
      "step": 63500
    },
    {
      "epoch": 4.71,
      "learning_rate": 0.0004133333333333333,
      "loss": 3.0832,
      "step": 64000
    },
    {
      "epoch": 4.74,
      "learning_rate": 0.00041222222222222224,
      "loss": 3.0804,
      "step": 64500
    },
    {
      "epoch": 4.78,
      "learning_rate": 0.0004111111111111111,
      "loss": 3.0782,
      "step": 65000
    },
    {
      "epoch": 4.82,
      "learning_rate": 0.00041,
      "loss": 3.0822,
      "step": 65500
    },
    {
      "epoch": 4.85,
      "learning_rate": 0.0004088888888888889,
      "loss": 3.0768,
      "step": 66000
    },
    {
      "epoch": 4.89,
      "learning_rate": 0.0004077777777777778,
      "loss": 3.0798,
      "step": 66500
    },
    {
      "epoch": 4.93,
      "learning_rate": 0.00040666666666666667,
      "loss": 3.0767,
      "step": 67000
    },
    {
      "epoch": 4.96,
      "learning_rate": 0.00040555555555555554,
      "loss": 3.0784,
      "step": 67500
    },
    {
      "epoch": 5.0,
      "learning_rate": 0.00040444444444444447,
      "loss": 3.0799,
      "step": 68000
    },
    {
      "epoch": 5.04,
      "learning_rate": 0.00040333333333333334,
      "loss": 3.0687,
      "step": 68500
    },
    {
      "epoch": 5.07,
      "learning_rate": 0.0004022222222222222,
      "loss": 3.0654,
      "step": 69000
    },
    {
      "epoch": 5.11,
      "learning_rate": 0.0004011111111111111,
      "loss": 3.0658,
      "step": 69500
    },
    {
      "epoch": 5.15,
      "learning_rate": 0.0004,
      "loss": 3.0666,
      "step": 70000
    },
    {
      "epoch": 5.18,
      "learning_rate": 0.0003988888888888889,
      "loss": 3.0691,
      "step": 70500
    },
    {
      "epoch": 5.22,
      "learning_rate": 0.00039777777777777777,
      "loss": 3.0679,
      "step": 71000
    },
    {
      "epoch": 5.26,
      "learning_rate": 0.0003966666666666667,
      "loss": 3.0663,
      "step": 71500
    },
    {
      "epoch": 5.29,
      "learning_rate": 0.00039555555555555557,
      "loss": 3.0678,
      "step": 72000
    },
    {
      "epoch": 5.33,
      "learning_rate": 0.00039444444444444444,
      "loss": 3.0648,
      "step": 72500
    },
    {
      "epoch": 5.37,
      "learning_rate": 0.0003933333333333333,
      "loss": 3.0642,
      "step": 73000
    },
    {
      "epoch": 5.4,
      "learning_rate": 0.00039222222222222225,
      "loss": 3.0655,
      "step": 73500
    },
    {
      "epoch": 5.44,
      "learning_rate": 0.0003911111111111111,
      "loss": 3.0613,
      "step": 74000
    },
    {
      "epoch": 5.48,
      "learning_rate": 0.00039000000000000005,
      "loss": 3.0649,
      "step": 74500
    },
    {
      "epoch": 5.52,
      "learning_rate": 0.0003888888888888889,
      "loss": 3.0645,
      "step": 75000
    },
    {
      "epoch": 5.55,
      "learning_rate": 0.0003877777777777778,
      "loss": 3.0617,
      "step": 75500
    },
    {
      "epoch": 5.59,
      "learning_rate": 0.00038666666666666667,
      "loss": 3.0639,
      "step": 76000
    },
    {
      "epoch": 5.63,
      "learning_rate": 0.00038555555555555554,
      "loss": 3.0625,
      "step": 76500
    },
    {
      "epoch": 5.66,
      "learning_rate": 0.0003844444444444444,
      "loss": 3.063,
      "step": 77000
    },
    {
      "epoch": 5.7,
      "learning_rate": 0.00038333333333333334,
      "loss": 3.0613,
      "step": 77500
    },
    {
      "epoch": 5.74,
      "learning_rate": 0.0003822222222222223,
      "loss": 3.0574,
      "step": 78000
    },
    {
      "epoch": 5.77,
      "learning_rate": 0.00038111111111111115,
      "loss": 3.0592,
      "step": 78500
    },
    {
      "epoch": 5.81,
      "learning_rate": 0.00038,
      "loss": 3.057,
      "step": 79000
    },
    {
      "epoch": 5.85,
      "learning_rate": 0.0003788888888888889,
      "loss": 3.058,
      "step": 79500
    },
    {
      "epoch": 5.88,
      "learning_rate": 0.00037777777777777777,
      "loss": 3.0594,
      "step": 80000
    },
    {
      "epoch": 5.92,
      "learning_rate": 0.00037666666666666664,
      "loss": 3.0595,
      "step": 80500
    },
    {
      "epoch": 5.96,
      "learning_rate": 0.0003755555555555555,
      "loss": 3.0537,
      "step": 81000
    },
    {
      "epoch": 5.99,
      "learning_rate": 0.0003744444444444445,
      "loss": 3.0555,
      "step": 81500
    },
    {
      "epoch": 6.03,
      "learning_rate": 0.0003733333333333334,
      "loss": 3.0541,
      "step": 82000
    },
    {
      "epoch": 6.07,
      "learning_rate": 0.00037222222222222225,
      "loss": 3.0485,
      "step": 82500
    },
    {
      "epoch": 6.1,
      "learning_rate": 0.0003711111111111111,
      "loss": 3.0487,
      "step": 83000
    },
    {
      "epoch": 6.14,
      "learning_rate": 0.00037,
      "loss": 3.0476,
      "step": 83500
    },
    {
      "epoch": 6.18,
      "learning_rate": 0.00036888888888888887,
      "loss": 3.0517,
      "step": 84000
    },
    {
      "epoch": 6.21,
      "learning_rate": 0.00036777777777777774,
      "loss": 3.0496,
      "step": 84500
    },
    {
      "epoch": 6.25,
      "learning_rate": 0.00036666666666666667,
      "loss": 3.0494,
      "step": 85000
    },
    {
      "epoch": 6.29,
      "learning_rate": 0.0003655555555555556,
      "loss": 3.0446,
      "step": 85500
    },
    {
      "epoch": 6.32,
      "learning_rate": 0.00036444444444444447,
      "loss": 3.0447,
      "step": 86000
    },
    {
      "epoch": 6.36,
      "learning_rate": 0.00036333333333333335,
      "loss": 3.0464,
      "step": 86500
    },
    {
      "epoch": 6.4,
      "learning_rate": 0.0003622222222222222,
      "loss": 3.0445,
      "step": 87000
    },
    {
      "epoch": 6.43,
      "learning_rate": 0.0003611111111111111,
      "loss": 3.0451,
      "step": 87500
    },
    {
      "epoch": 6.47,
      "learning_rate": 0.00035999999999999997,
      "loss": 3.0477,
      "step": 88000
    },
    {
      "epoch": 6.51,
      "learning_rate": 0.0003588888888888889,
      "loss": 3.0463,
      "step": 88500
    },
    {
      "epoch": 6.54,
      "learning_rate": 0.00035777777777777777,
      "loss": 3.0464,
      "step": 89000
    },
    {
      "epoch": 6.58,
      "learning_rate": 0.0003566666666666667,
      "loss": 3.0456,
      "step": 89500
    },
    {
      "epoch": 6.62,
      "learning_rate": 0.00035555555555555557,
      "loss": 3.0459,
      "step": 90000
    },
    {
      "epoch": 6.65,
      "learning_rate": 0.00035444444444444445,
      "loss": 3.0411,
      "step": 90500
    },
    {
      "epoch": 6.69,
      "learning_rate": 0.0003533333333333333,
      "loss": 3.0432,
      "step": 91000
    },
    {
      "epoch": 6.73,
      "learning_rate": 0.00035222222222222225,
      "loss": 3.044,
      "step": 91500
    },
    {
      "epoch": 6.77,
      "learning_rate": 0.0003511111111111111,
      "loss": 3.0467,
      "step": 92000
    },
    {
      "epoch": 6.8,
      "learning_rate": 0.00035,
      "loss": 3.0449,
      "step": 92500
    },
    {
      "epoch": 6.84,
      "learning_rate": 0.0003488888888888889,
      "loss": 3.0422,
      "step": 93000
    },
    {
      "epoch": 6.88,
      "learning_rate": 0.0003477777777777778,
      "loss": 3.0422,
      "step": 93500
    },
    {
      "epoch": 6.91,
      "learning_rate": 0.00034666666666666667,
      "loss": 3.0412,
      "step": 94000
    },
    {
      "epoch": 6.95,
      "learning_rate": 0.00034555555555555555,
      "loss": 3.0427,
      "step": 94500
    },
    {
      "epoch": 6.99,
      "learning_rate": 0.0003444444444444445,
      "loss": 3.0417,
      "step": 95000
    },
    {
      "epoch": 7.02,
      "learning_rate": 0.00034333333333333335,
      "loss": 3.0388,
      "step": 95500
    },
    {
      "epoch": 7.06,
      "learning_rate": 0.0003422222222222222,
      "loss": 3.0323,
      "step": 96000
    },
    {
      "epoch": 7.1,
      "learning_rate": 0.0003411111111111111,
      "loss": 3.0308,
      "step": 96500
    },
    {
      "epoch": 7.13,
      "learning_rate": 0.00034,
      "loss": 3.0339,
      "step": 97000
    },
    {
      "epoch": 7.17,
      "learning_rate": 0.0003388888888888889,
      "loss": 3.0324,
      "step": 97500
    },
    {
      "epoch": 7.21,
      "learning_rate": 0.00033777777777777777,
      "loss": 3.0313,
      "step": 98000
    },
    {
      "epoch": 7.24,
      "learning_rate": 0.0003366666666666667,
      "loss": 3.0324,
      "step": 98500
    },
    {
      "epoch": 7.28,
      "learning_rate": 0.0003355555555555556,
      "loss": 3.0331,
      "step": 99000
    },
    {
      "epoch": 7.32,
      "learning_rate": 0.00033444444444444445,
      "loss": 3.0354,
      "step": 99500
    },
    {
      "epoch": 7.35,
      "learning_rate": 0.0003333333333333333,
      "loss": 3.0351,
      "step": 100000
    },
    {
      "epoch": 7.39,
      "learning_rate": 0.0003322222222222222,
      "loss": 3.0331,
      "step": 100500
    },
    {
      "epoch": 7.43,
      "learning_rate": 0.0003311111111111111,
      "loss": 3.0339,
      "step": 101000
    },
    {
      "epoch": 7.46,
      "learning_rate": 0.00033,
      "loss": 3.0332,
      "step": 101500
    },
    {
      "epoch": 7.5,
      "learning_rate": 0.0003288888888888889,
      "loss": 3.031,
      "step": 102000
    },
    {
      "epoch": 7.54,
      "learning_rate": 0.0003277777777777778,
      "loss": 3.0326,
      "step": 102500
    },
    {
      "epoch": 7.57,
      "learning_rate": 0.0003266666666666667,
      "loss": 3.0317,
      "step": 103000
    },
    {
      "epoch": 7.61,
      "learning_rate": 0.00032555555555555555,
      "loss": 3.0328,
      "step": 103500
    },
    {
      "epoch": 7.65,
      "learning_rate": 0.0003244444444444444,
      "loss": 3.0309,
      "step": 104000
    },
    {
      "epoch": 7.68,
      "learning_rate": 0.0003233333333333333,
      "loss": 3.0331,
      "step": 104500
    },
    {
      "epoch": 7.72,
      "learning_rate": 0.0003222222222222222,
      "loss": 3.0289,
      "step": 105000
    },
    {
      "epoch": 7.76,
      "learning_rate": 0.00032111111111111115,
      "loss": 3.0311,
      "step": 105500
    },
    {
      "epoch": 7.79,
      "learning_rate": 0.00032,
      "loss": 3.0277,
      "step": 106000
    },
    {
      "epoch": 7.83,
      "learning_rate": 0.0003188888888888889,
      "loss": 3.0288,
      "step": 106500
    },
    {
      "epoch": 7.87,
      "learning_rate": 0.0003177777777777778,
      "loss": 3.0278,
      "step": 107000
    },
    {
      "epoch": 7.9,
      "learning_rate": 0.00031666666666666665,
      "loss": 3.0322,
      "step": 107500
    },
    {
      "epoch": 7.94,
      "learning_rate": 0.0003155555555555555,
      "loss": 3.0298,
      "step": 108000
    },
    {
      "epoch": 7.98,
      "learning_rate": 0.0003144444444444445,
      "loss": 3.0303,
      "step": 108500
    },
    {
      "epoch": 8.02,
      "learning_rate": 0.0003133333333333334,
      "loss": 3.028,
      "step": 109000
    },
    {
      "epoch": 8.05,
      "learning_rate": 0.00031222222222222225,
      "loss": 3.0171,
      "step": 109500
    },
    {
      "epoch": 8.09,
      "learning_rate": 0.0003111111111111111,
      "loss": 3.0179,
      "step": 110000
    },
    {
      "epoch": 8.13,
      "learning_rate": 0.00031,
      "loss": 3.0224,
      "step": 110500
    },
    {
      "epoch": 8.16,
      "learning_rate": 0.0003088888888888889,
      "loss": 3.019,
      "step": 111000
    },
    {
      "epoch": 8.2,
      "learning_rate": 0.00030777777777777775,
      "loss": 3.022,
      "step": 111500
    },
    {
      "epoch": 8.24,
      "learning_rate": 0.0003066666666666667,
      "loss": 3.0192,
      "step": 112000
    },
    {
      "epoch": 8.27,
      "learning_rate": 0.0003055555555555556,
      "loss": 3.0199,
      "step": 112500
    },
    {
      "epoch": 8.31,
      "learning_rate": 0.0003044444444444445,
      "loss": 3.0199,
      "step": 113000
    },
    {
      "epoch": 8.35,
      "learning_rate": 0.00030333333333333335,
      "loss": 3.0236,
      "step": 113500
    },
    {
      "epoch": 8.38,
      "learning_rate": 0.0003022222222222222,
      "loss": 3.021,
      "step": 114000
    },
    {
      "epoch": 8.42,
      "learning_rate": 0.0003011111111111111,
      "loss": 3.0217,
      "step": 114500
    },
    {
      "epoch": 8.46,
      "learning_rate": 0.0003,
      "loss": 3.0238,
      "step": 115000
    },
    {
      "epoch": 8.49,
      "learning_rate": 0.0002988888888888889,
      "loss": 3.0219,
      "step": 115500
    },
    {
      "epoch": 8.53,
      "learning_rate": 0.0002977777777777778,
      "loss": 3.0215,
      "step": 116000
    },
    {
      "epoch": 8.57,
      "learning_rate": 0.0002966666666666667,
      "loss": 3.0195,
      "step": 116500
    },
    {
      "epoch": 8.6,
      "learning_rate": 0.0002955555555555556,
      "loss": 3.0216,
      "step": 117000
    },
    {
      "epoch": 8.64,
      "learning_rate": 0.00029444444444444445,
      "loss": 3.0182,
      "step": 117500
    },
    {
      "epoch": 8.68,
      "learning_rate": 0.0002933333333333333,
      "loss": 3.0168,
      "step": 118000
    },
    {
      "epoch": 8.71,
      "learning_rate": 0.0002922222222222222,
      "loss": 3.0202,
      "step": 118500
    },
    {
      "epoch": 8.75,
      "learning_rate": 0.00029111111111111113,
      "loss": 3.0194,
      "step": 119000
    },
    {
      "epoch": 8.79,
      "learning_rate": 0.00029,
      "loss": 3.0237,
      "step": 119500
    },
    {
      "epoch": 8.82,
      "learning_rate": 0.0002888888888888889,
      "loss": 3.0203,
      "step": 120000
    },
    {
      "epoch": 8.86,
      "learning_rate": 0.0002877777777777778,
      "loss": 3.02,
      "step": 120500
    },
    {
      "epoch": 8.9,
      "learning_rate": 0.0002866666666666667,
      "loss": 3.0177,
      "step": 121000
    },
    {
      "epoch": 8.93,
      "learning_rate": 0.00028555555555555555,
      "loss": 3.0158,
      "step": 121500
    },
    {
      "epoch": 8.97,
      "learning_rate": 0.0002844444444444444,
      "loss": 3.0158,
      "step": 122000
    },
    {
      "epoch": 9.01,
      "learning_rate": 0.00028333333333333335,
      "loss": 3.0163,
      "step": 122500
    },
    {
      "epoch": 9.04,
      "learning_rate": 0.00028222222222222223,
      "loss": 3.0081,
      "step": 123000
    },
    {
      "epoch": 9.08,
      "learning_rate": 0.0002811111111111111,
      "loss": 3.0075,
      "step": 123500
    },
    {
      "epoch": 9.12,
      "learning_rate": 0.00028000000000000003,
      "loss": 3.0117,
      "step": 124000
    },
    {
      "epoch": 9.16,
      "learning_rate": 0.0002788888888888889,
      "loss": 3.0084,
      "step": 124500
    },
    {
      "epoch": 9.19,
      "learning_rate": 0.0002777777777777778,
      "loss": 3.0084,
      "step": 125000
    },
    {
      "epoch": 9.23,
      "learning_rate": 0.00027666666666666665,
      "loss": 3.0093,
      "step": 125500
    },
    {
      "epoch": 9.27,
      "learning_rate": 0.0002755555555555556,
      "loss": 3.0095,
      "step": 126000
    },
    {
      "epoch": 9.3,
      "learning_rate": 0.00027444444444444445,
      "loss": 3.0094,
      "step": 126500
    },
    {
      "epoch": 9.34,
      "learning_rate": 0.00027333333333333333,
      "loss": 3.0124,
      "step": 127000
    },
    {
      "epoch": 9.38,
      "learning_rate": 0.0002722222222222222,
      "loss": 3.0124,
      "step": 127500
    },
    {
      "epoch": 9.41,
      "learning_rate": 0.00027111111111111113,
      "loss": 3.0079,
      "step": 128000
    },
    {
      "epoch": 9.45,
      "learning_rate": 0.00027,
      "loss": 3.0097,
      "step": 128500
    },
    {
      "epoch": 9.49,
      "learning_rate": 0.00026888888888888893,
      "loss": 3.0117,
      "step": 129000
    },
    {
      "epoch": 9.52,
      "learning_rate": 0.0002677777777777778,
      "loss": 3.0085,
      "step": 129500
    },
    {
      "epoch": 9.56,
      "learning_rate": 0.0002666666666666667,
      "loss": 3.0105,
      "step": 130000
    },
    {
      "epoch": 9.6,
      "learning_rate": 0.00026555555555555555,
      "loss": 3.0068,
      "step": 130500
    },
    {
      "epoch": 9.63,
      "learning_rate": 0.00026444444444444443,
      "loss": 3.0105,
      "step": 131000
    },
    {
      "epoch": 9.67,
      "learning_rate": 0.0002633333333333333,
      "loss": 3.0098,
      "step": 131500
    },
    {
      "epoch": 9.71,
      "learning_rate": 0.00026222222222222223,
      "loss": 3.0127,
      "step": 132000
    },
    {
      "epoch": 9.74,
      "learning_rate": 0.00026111111111111116,
      "loss": 3.0083,
      "step": 132500
    },
    {
      "epoch": 9.78,
      "learning_rate": 0.00026000000000000003,
      "loss": 3.0096,
      "step": 133000
    },
    {
      "epoch": 9.82,
      "learning_rate": 0.0002588888888888889,
      "loss": 3.0084,
      "step": 133500
    },
    {
      "epoch": 9.85,
      "learning_rate": 0.0002577777777777778,
      "loss": 3.0096,
      "step": 134000
    },
    {
      "epoch": 9.89,
      "learning_rate": 0.00025666666666666665,
      "loss": 3.01,
      "step": 134500
    },
    {
      "epoch": 9.93,
      "learning_rate": 0.00025555555555555553,
      "loss": 3.0065,
      "step": 135000
    },
    {
      "epoch": 9.96,
      "learning_rate": 0.0002544444444444444,
      "loss": 3.007,
      "step": 135500
    },
    {
      "epoch": 10.0,
      "learning_rate": 0.0002533333333333334,
      "loss": 3.0101,
      "step": 136000
    },
    {
      "epoch": 10.04,
      "learning_rate": 0.00025222222222222226,
      "loss": 2.998,
      "step": 136500
    },
    {
      "epoch": 10.07,
      "learning_rate": 0.00025111111111111113,
      "loss": 2.9967,
      "step": 137000
    },
    {
      "epoch": 10.11,
      "learning_rate": 0.00025,
      "loss": 3.001,
      "step": 137500
    },
    {
      "epoch": 10.15,
      "learning_rate": 0.0002488888888888889,
      "loss": 3.0031,
      "step": 138000
    },
    {
      "epoch": 10.18,
      "learning_rate": 0.0002477777777777778,
      "loss": 3.0034,
      "step": 138500
    },
    {
      "epoch": 10.22,
      "learning_rate": 0.0002466666666666667,
      "loss": 3.0003,
      "step": 139000
    },
    {
      "epoch": 10.26,
      "learning_rate": 0.00024555555555555556,
      "loss": 3.0006,
      "step": 139500
    },
    {
      "epoch": 10.29,
      "learning_rate": 0.00024444444444444443,
      "loss": 3.0,
      "step": 140000
    },
    {
      "epoch": 10.33,
      "learning_rate": 0.00024333333333333336,
      "loss": 3.0014,
      "step": 140500
    },
    {
      "epoch": 10.37,
      "learning_rate": 0.00024222222222222223,
      "loss": 3.001,
      "step": 141000
    },
    {
      "epoch": 10.41,
      "learning_rate": 0.0002411111111111111,
      "loss": 3.0003,
      "step": 141500
    },
    {
      "epoch": 10.44,
      "learning_rate": 0.00024,
      "loss": 2.9984,
      "step": 142000
    },
    {
      "epoch": 10.48,
      "learning_rate": 0.0002388888888888889,
      "loss": 2.9995,
      "step": 142500
    },
    {
      "epoch": 10.52,
      "learning_rate": 0.00023777777777777778,
      "loss": 3.0015,
      "step": 143000
    },
    {
      "epoch": 10.55,
      "learning_rate": 0.00023666666666666668,
      "loss": 2.9997,
      "step": 143500
    },
    {
      "epoch": 10.59,
      "learning_rate": 0.00023555555555555556,
      "loss": 3.0006,
      "step": 144000
    },
    {
      "epoch": 10.63,
      "learning_rate": 0.00023444444444444446,
      "loss": 3.0004,
      "step": 144500
    },
    {
      "epoch": 10.66,
      "learning_rate": 0.00023333333333333333,
      "loss": 3.0,
      "step": 145000
    },
    {
      "epoch": 10.7,
      "learning_rate": 0.00023222222222222223,
      "loss": 2.999,
      "step": 145500
    },
    {
      "epoch": 10.74,
      "learning_rate": 0.0002311111111111111,
      "loss": 2.9987,
      "step": 146000
    },
    {
      "epoch": 10.77,
      "learning_rate": 0.00023,
      "loss": 3.0009,
      "step": 146500
    },
    {
      "epoch": 10.81,
      "learning_rate": 0.0002288888888888889,
      "loss": 2.9978,
      "step": 147000
    },
    {
      "epoch": 10.85,
      "learning_rate": 0.00022777777777777778,
      "loss": 2.9965,
      "step": 147500
    },
    {
      "epoch": 10.88,
      "learning_rate": 0.00022666666666666666,
      "loss": 2.9987,
      "step": 148000
    },
    {
      "epoch": 10.92,
      "learning_rate": 0.00022555555555555556,
      "loss": 2.9995,
      "step": 148500
    },
    {
      "epoch": 10.96,
      "learning_rate": 0.00022444444444444446,
      "loss": 2.9985,
      "step": 149000
    },
    {
      "epoch": 10.99,
      "learning_rate": 0.00022333333333333333,
      "loss": 3.001,
      "step": 149500
    },
    {
      "epoch": 11.03,
      "learning_rate": 0.0002222222222222222,
      "loss": 2.9939,
      "step": 150000
    },
    {
      "epoch": 11.07,
      "learning_rate": 0.00022111111111111113,
      "loss": 2.9897,
      "step": 150500
    },
    {
      "epoch": 11.1,
      "learning_rate": 0.00022,
      "loss": 2.9898,
      "step": 151000
    },
    {
      "epoch": 11.14,
      "learning_rate": 0.00021888888888888888,
      "loss": 2.9934,
      "step": 151500
    },
    {
      "epoch": 11.18,
      "learning_rate": 0.00021777777777777776,
      "loss": 2.9914,
      "step": 152000
    },
    {
      "epoch": 11.21,
      "learning_rate": 0.00021666666666666668,
      "loss": 2.9898,
      "step": 152500
    },
    {
      "epoch": 11.25,
      "learning_rate": 0.00021555555555555556,
      "loss": 2.9901,
      "step": 153000
    },
    {
      "epoch": 11.29,
      "learning_rate": 0.00021444444444444443,
      "loss": 2.9888,
      "step": 153500
    },
    {
      "epoch": 11.32,
      "learning_rate": 0.00021333333333333336,
      "loss": 2.9906,
      "step": 154000
    },
    {
      "epoch": 11.36,
      "learning_rate": 0.00021222222222222223,
      "loss": 2.993,
      "step": 154500
    },
    {
      "epoch": 11.4,
      "learning_rate": 0.0002111111111111111,
      "loss": 2.9917,
      "step": 155000
    },
    {
      "epoch": 11.43,
      "learning_rate": 0.00021,
      "loss": 2.9932,
      "step": 155500
    },
    {
      "epoch": 11.47,
      "learning_rate": 0.0002088888888888889,
      "loss": 2.9918,
      "step": 156000
    },
    {
      "epoch": 11.51,
      "learning_rate": 0.00020777777777777778,
      "loss": 2.9904,
      "step": 156500
    },
    {
      "epoch": 11.54,
      "learning_rate": 0.00020666666666666666,
      "loss": 2.9871,
      "step": 157000
    },
    {
      "epoch": 11.58,
      "learning_rate": 0.00020555555555555556,
      "loss": 2.9948,
      "step": 157500
    },
    {
      "epoch": 11.62,
      "learning_rate": 0.00020444444444444446,
      "loss": 2.9933,
      "step": 158000
    },
    {
      "epoch": 11.66,
      "learning_rate": 0.00020333333333333333,
      "loss": 2.9913,
      "step": 158500
    },
    {
      "epoch": 11.69,
      "learning_rate": 0.00020222222222222223,
      "loss": 2.9933,
      "step": 159000
    },
    {
      "epoch": 11.73,
      "learning_rate": 0.0002011111111111111,
      "loss": 2.9913,
      "step": 159500
    },
    {
      "epoch": 11.77,
      "learning_rate": 0.0002,
      "loss": 2.9878,
      "step": 160000
    },
    {
      "epoch": 11.8,
      "learning_rate": 0.00019888888888888888,
      "loss": 2.9913,
      "step": 160500
    },
    {
      "epoch": 11.84,
      "learning_rate": 0.00019777777777777778,
      "loss": 2.9916,
      "step": 161000
    },
    {
      "epoch": 11.88,
      "learning_rate": 0.00019666666666666666,
      "loss": 2.9904,
      "step": 161500
    },
    {
      "epoch": 11.91,
      "learning_rate": 0.00019555555555555556,
      "loss": 2.9905,
      "step": 162000
    },
    {
      "epoch": 11.95,
      "learning_rate": 0.00019444444444444446,
      "loss": 2.9902,
      "step": 162500
    },
    {
      "epoch": 11.99,
      "learning_rate": 0.00019333333333333333,
      "loss": 2.992,
      "step": 163000
    },
    {
      "epoch": 12.02,
      "learning_rate": 0.0001922222222222222,
      "loss": 2.9882,
      "step": 163500
    },
    {
      "epoch": 12.06,
      "learning_rate": 0.00019111111111111114,
      "loss": 2.98,
      "step": 164000
    },
    {
      "epoch": 12.1,
      "learning_rate": 0.00019,
      "loss": 2.9818,
      "step": 164500
    },
    {
      "epoch": 12.13,
      "learning_rate": 0.00018888888888888888,
      "loss": 2.9817,
      "step": 165000
    },
    {
      "epoch": 12.17,
      "learning_rate": 0.00018777777777777776,
      "loss": 2.9836,
      "step": 165500
    },
    {
      "epoch": 12.21,
      "learning_rate": 0.0001866666666666667,
      "loss": 2.981,
      "step": 166000
    },
    {
      "epoch": 12.24,
      "learning_rate": 0.00018555555555555556,
      "loss": 2.9864,
      "step": 166500
    },
    {
      "epoch": 12.28,
      "learning_rate": 0.00018444444444444443,
      "loss": 2.9824,
      "step": 167000
    },
    {
      "epoch": 12.32,
      "learning_rate": 0.00018333333333333334,
      "loss": 2.984,
      "step": 167500
    },
    {
      "epoch": 12.35,
      "learning_rate": 0.00018222222222222224,
      "loss": 2.983,
      "step": 168000
    },
    {
      "epoch": 12.39,
      "learning_rate": 0.0001811111111111111,
      "loss": 2.9828,
      "step": 168500
    },
    {
      "epoch": 12.43,
      "learning_rate": 0.00017999999999999998,
      "loss": 2.9817,
      "step": 169000
    },
    {
      "epoch": 12.46,
      "learning_rate": 0.00017888888888888889,
      "loss": 2.9834,
      "step": 169500
    },
    {
      "epoch": 12.5,
      "learning_rate": 0.00017777777777777779,
      "loss": 2.984,
      "step": 170000
    },
    {
      "epoch": 12.54,
      "learning_rate": 0.00017666666666666666,
      "loss": 2.983,
      "step": 170500
    },
    {
      "epoch": 12.57,
      "learning_rate": 0.00017555555555555556,
      "loss": 2.9839,
      "step": 171000
    },
    {
      "epoch": 12.61,
      "learning_rate": 0.00017444444444444446,
      "loss": 2.9826,
      "step": 171500
    },
    {
      "epoch": 12.65,
      "learning_rate": 0.00017333333333333334,
      "loss": 2.9842,
      "step": 172000
    },
    {
      "epoch": 12.68,
      "learning_rate": 0.00017222222222222224,
      "loss": 2.9854,
      "step": 172500
    },
    {
      "epoch": 12.72,
      "learning_rate": 0.0001711111111111111,
      "loss": 2.9853,
      "step": 173000
    },
    {
      "epoch": 12.76,
      "learning_rate": 0.00017,
      "loss": 2.9829,
      "step": 173500
    },
    {
      "epoch": 12.8,
      "learning_rate": 0.00016888888888888889,
      "loss": 2.9829,
      "step": 174000
    },
    {
      "epoch": 12.83,
      "learning_rate": 0.0001677777777777778,
      "loss": 2.978,
      "step": 174500
    },
    {
      "epoch": 12.87,
      "learning_rate": 0.00016666666666666666,
      "loss": 2.9841,
      "step": 175000
    },
    {
      "epoch": 12.91,
      "learning_rate": 0.00016555555555555556,
      "loss": 2.9804,
      "step": 175500
    },
    {
      "epoch": 12.94,
      "learning_rate": 0.00016444444444444446,
      "loss": 2.9809,
      "step": 176000
    },
    {
      "epoch": 12.98,
      "learning_rate": 0.00016333333333333334,
      "loss": 2.979,
      "step": 176500
    },
    {
      "epoch": 13.02,
      "learning_rate": 0.0001622222222222222,
      "loss": 2.9842,
      "step": 177000
    },
    {
      "epoch": 13.05,
      "learning_rate": 0.0001611111111111111,
      "loss": 2.9739,
      "step": 177500
    },
    {
      "epoch": 13.09,
      "learning_rate": 0.00016,
      "loss": 2.9713,
      "step": 178000
    },
    {
      "epoch": 13.13,
      "learning_rate": 0.0001588888888888889,
      "loss": 2.9755,
      "step": 178500
    },
    {
      "epoch": 13.16,
      "learning_rate": 0.00015777777777777776,
      "loss": 2.9766,
      "step": 179000
    },
    {
      "epoch": 13.2,
      "learning_rate": 0.0001566666666666667,
      "loss": 2.9751,
      "step": 179500
    },
    {
      "epoch": 13.24,
      "learning_rate": 0.00015555555555555556,
      "loss": 2.9764,
      "step": 180000
    },
    {
      "epoch": 13.27,
      "learning_rate": 0.00015444444444444444,
      "loss": 2.9778,
      "step": 180500
    },
    {
      "epoch": 13.31,
      "learning_rate": 0.00015333333333333334,
      "loss": 2.9729,
      "step": 181000
    },
    {
      "epoch": 13.35,
      "learning_rate": 0.00015222222222222224,
      "loss": 2.9737,
      "step": 181500
    },
    {
      "epoch": 13.38,
      "learning_rate": 0.0001511111111111111,
      "loss": 2.9753,
      "step": 182000
    },
    {
      "epoch": 13.42,
      "learning_rate": 0.00015,
      "loss": 2.9778,
      "step": 182500
    },
    {
      "epoch": 13.46,
      "learning_rate": 0.0001488888888888889,
      "loss": 2.9737,
      "step": 183000
    },
    {
      "epoch": 13.49,
      "learning_rate": 0.0001477777777777778,
      "loss": 2.9715,
      "step": 183500
    },
    {
      "epoch": 13.53,
      "learning_rate": 0.00014666666666666666,
      "loss": 2.973,
      "step": 184000
    },
    {
      "epoch": 13.57,
      "learning_rate": 0.00014555555555555556,
      "loss": 2.9754,
      "step": 184500
    },
    {
      "epoch": 13.6,
      "learning_rate": 0.00014444444444444444,
      "loss": 2.9724,
      "step": 185000
    },
    {
      "epoch": 13.64,
      "learning_rate": 0.00014333333333333334,
      "loss": 2.9735,
      "step": 185500
    },
    {
      "epoch": 13.68,
      "learning_rate": 0.0001422222222222222,
      "loss": 2.973,
      "step": 186000
    },
    {
      "epoch": 13.71,
      "learning_rate": 0.00014111111111111111,
      "loss": 2.977,
      "step": 186500
    },
    {
      "epoch": 13.75,
      "learning_rate": 0.00014000000000000001,
      "loss": 2.9762,
      "step": 187000
    },
    {
      "epoch": 13.79,
      "learning_rate": 0.0001388888888888889,
      "loss": 2.975,
      "step": 187500
    },
    {
      "epoch": 13.82,
      "learning_rate": 0.0001377777777777778,
      "loss": 2.9754,
      "step": 188000
    },
    {
      "epoch": 13.86,
      "learning_rate": 0.00013666666666666666,
      "loss": 2.9783,
      "step": 188500
    },
    {
      "epoch": 13.9,
      "learning_rate": 0.00013555555555555556,
      "loss": 2.9729,
      "step": 189000
    },
    {
      "epoch": 13.93,
      "learning_rate": 0.00013444444444444447,
      "loss": 2.9747,
      "step": 189500
    },
    {
      "epoch": 13.97,
      "learning_rate": 0.00013333333333333334,
      "loss": 2.9757,
      "step": 190000
    },
    {
      "epoch": 14.01,
      "learning_rate": 0.00013222222222222221,
      "loss": 2.9748,
      "step": 190500
    },
    {
      "epoch": 14.05,
      "learning_rate": 0.00013111111111111111,
      "loss": 2.9663,
      "step": 191000
    },
    {
      "epoch": 14.08,
      "learning_rate": 0.00013000000000000002,
      "loss": 2.9664,
      "step": 191500
    },
    {
      "epoch": 14.12,
      "learning_rate": 0.0001288888888888889,
      "loss": 2.9635,
      "step": 192000
    },
    {
      "epoch": 14.16,
      "learning_rate": 0.00012777777777777776,
      "loss": 2.9708,
      "step": 192500
    },
    {
      "epoch": 14.19,
      "learning_rate": 0.0001266666666666667,
      "loss": 2.9642,
      "step": 193000
    },
    {
      "epoch": 14.23,
      "learning_rate": 0.00012555555555555557,
      "loss": 2.9657,
      "step": 193500
    },
    {
      "epoch": 14.27,
      "learning_rate": 0.00012444444444444444,
      "loss": 2.9659,
      "step": 194000
    },
    {
      "epoch": 14.3,
      "learning_rate": 0.00012333333333333334,
      "loss": 2.9707,
      "step": 194500
    },
    {
      "epoch": 14.34,
      "learning_rate": 0.00012222222222222221,
      "loss": 2.9662,
      "step": 195000
    },
    {
      "epoch": 14.38,
      "learning_rate": 0.00012111111111111112,
      "loss": 2.9689,
      "step": 195500
    },
    {
      "epoch": 14.41,
      "learning_rate": 0.00012,
      "loss": 2.9681,
      "step": 196000
    },
    {
      "epoch": 14.45,
      "learning_rate": 0.00011888888888888889,
      "loss": 2.9654,
      "step": 196500
    },
    {
      "epoch": 14.49,
      "learning_rate": 0.00011777777777777778,
      "loss": 2.9679,
      "step": 197000
    },
    {
      "epoch": 14.52,
      "learning_rate": 0.00011666666666666667,
      "loss": 2.9679,
      "step": 197500
    },
    {
      "epoch": 14.56,
      "learning_rate": 0.00011555555555555555,
      "loss": 2.9679,
      "step": 198000
    },
    {
      "epoch": 14.6,
      "learning_rate": 0.00011444444444444445,
      "loss": 2.9681,
      "step": 198500
    },
    {
      "epoch": 14.63,
      "learning_rate": 0.00011333333333333333,
      "loss": 2.9662,
      "step": 199000
    },
    {
      "epoch": 14.67,
      "learning_rate": 0.00011222222222222223,
      "loss": 2.9689,
      "step": 199500
    },
    {
      "epoch": 14.71,
      "learning_rate": 0.0001111111111111111,
      "loss": 2.9681,
      "step": 200000
    },
    {
      "epoch": 14.74,
      "learning_rate": 0.00011,
      "loss": 2.9678,
      "step": 200500
    },
    {
      "epoch": 14.78,
      "learning_rate": 0.00010888888888888888,
      "loss": 2.9642,
      "step": 201000
    },
    {
      "epoch": 14.82,
      "learning_rate": 0.00010777777777777778,
      "loss": 2.9671,
      "step": 201500
    },
    {
      "epoch": 14.85,
      "learning_rate": 0.00010666666666666668,
      "loss": 2.9656,
      "step": 202000
    },
    {
      "epoch": 14.89,
      "learning_rate": 0.00010555555555555555,
      "loss": 2.967,
      "step": 202500
    },
    {
      "epoch": 14.93,
      "learning_rate": 0.00010444444444444445,
      "loss": 2.9682,
      "step": 203000
    },
    {
      "epoch": 14.96,
      "learning_rate": 0.00010333333333333333,
      "loss": 2.9646,
      "step": 203500
    },
    {
      "epoch": 15.0,
      "learning_rate": 0.00010222222222222223,
      "loss": 2.9696,
      "step": 204000
    },
    {
      "epoch": 15.04,
      "learning_rate": 0.00010111111111111112,
      "loss": 2.9576,
      "step": 204500
    },
    {
      "epoch": 15.07,
      "learning_rate": 0.0001,
      "loss": 2.9575,
      "step": 205000
    },
    {
      "epoch": 15.11,
      "learning_rate": 9.888888888888889e-05,
      "loss": 2.9589,
      "step": 205500
    },
    {
      "epoch": 15.15,
      "learning_rate": 9.777777777777778e-05,
      "loss": 2.9596,
      "step": 206000
    },
    {
      "epoch": 15.18,
      "learning_rate": 9.666666666666667e-05,
      "loss": 2.9611,
      "step": 206500
    },
    {
      "epoch": 15.22,
      "learning_rate": 9.555555555555557e-05,
      "loss": 2.9601,
      "step": 207000
    },
    {
      "epoch": 15.26,
      "learning_rate": 9.444444444444444e-05,
      "loss": 2.962,
      "step": 207500
    },
    {
      "epoch": 15.3,
      "learning_rate": 9.333333333333334e-05,
      "loss": 2.9607,
      "step": 208000
    },
    {
      "epoch": 15.33,
      "learning_rate": 9.222222222222222e-05,
      "loss": 2.9578,
      "step": 208500
    },
    {
      "epoch": 15.37,
      "learning_rate": 9.111111111111112e-05,
      "loss": 2.9591,
      "step": 209000
    },
    {
      "epoch": 15.41,
      "learning_rate": 8.999999999999999e-05,
      "loss": 2.959,
      "step": 209500
    },
    {
      "epoch": 15.44,
      "learning_rate": 8.888888888888889e-05,
      "loss": 2.9613,
      "step": 210000
    },
    {
      "epoch": 15.48,
      "learning_rate": 8.777777777777778e-05,
      "loss": 2.9612,
      "step": 210500
    },
    {
      "epoch": 15.52,
      "learning_rate": 8.666666666666667e-05,
      "loss": 2.9569,
      "step": 211000
    },
    {
      "epoch": 15.55,
      "learning_rate": 8.555555555555556e-05,
      "loss": 2.9585,
      "step": 211500
    },
    {
      "epoch": 15.59,
      "learning_rate": 8.444444444444444e-05,
      "loss": 2.9591,
      "step": 212000
    },
    {
      "epoch": 15.63,
      "learning_rate": 8.333333333333333e-05,
      "loss": 2.9603,
      "step": 212500
    },
    {
      "epoch": 15.66,
      "learning_rate": 8.222222222222223e-05,
      "loss": 2.9568,
      "step": 213000
    },
    {
      "epoch": 15.7,
      "learning_rate": 8.11111111111111e-05,
      "loss": 2.9602,
      "step": 213500
    },
    {
      "epoch": 15.74,
      "learning_rate": 8e-05,
      "loss": 2.9611,
      "step": 214000
    },
    {
      "epoch": 15.77,
      "learning_rate": 7.888888888888888e-05,
      "loss": 2.9602,
      "step": 214500
    },
    {
      "epoch": 15.81,
      "learning_rate": 7.777777777777778e-05,
      "loss": 2.9579,
      "step": 215000
    },
    {
      "epoch": 15.85,
      "learning_rate": 7.666666666666667e-05,
      "loss": 2.959,
      "step": 215500
    },
    {
      "epoch": 15.88,
      "learning_rate": 7.555555555555556e-05,
      "loss": 2.9618,
      "step": 216000
    },
    {
      "epoch": 15.92,
      "learning_rate": 7.444444444444444e-05,
      "loss": 2.9592,
      "step": 216500
    },
    {
      "epoch": 15.96,
      "learning_rate": 7.333333333333333e-05,
      "loss": 2.9575,
      "step": 217000
    },
    {
      "epoch": 15.99,
      "learning_rate": 7.222222222222222e-05,
      "loss": 2.9577,
      "step": 217500
    },
    {
      "epoch": 16.03,
      "learning_rate": 7.11111111111111e-05,
      "loss": 2.9578,
      "step": 218000
    },
    {
      "epoch": 16.07,
      "learning_rate": 7.000000000000001e-05,
      "loss": 2.9527,
      "step": 218500
    },
    {
      "epoch": 16.1,
      "learning_rate": 6.88888888888889e-05,
      "loss": 2.953,
      "step": 219000
    },
    {
      "epoch": 16.14,
      "learning_rate": 6.777777777777778e-05,
      "loss": 2.9519,
      "step": 219500
    },
    {
      "epoch": 16.18,
      "learning_rate": 6.666666666666667e-05,
      "loss": 2.9501,
      "step": 220000
    },
    {
      "epoch": 16.21,
      "learning_rate": 6.555555555555556e-05,
      "loss": 2.9535,
      "step": 220500
    },
    {
      "epoch": 16.25,
      "learning_rate": 6.444444444444444e-05,
      "loss": 2.9545,
      "step": 221000
    },
    {
      "epoch": 16.29,
      "learning_rate": 6.333333333333335e-05,
      "loss": 2.9539,
      "step": 221500
    },
    {
      "epoch": 16.32,
      "learning_rate": 6.222222222222222e-05,
      "loss": 2.9518,
      "step": 222000
    },
    {
      "epoch": 16.36,
      "learning_rate": 6.111111111111111e-05,
      "loss": 2.957,
      "step": 222500
    },
    {
      "epoch": 16.4,
      "learning_rate": 6e-05,
      "loss": 2.9535,
      "step": 223000
    },
    {
      "epoch": 16.44,
      "learning_rate": 5.888888888888889e-05,
      "loss": 2.9506,
      "step": 223500
    },
    {
      "epoch": 16.47,
      "learning_rate": 5.7777777777777776e-05,
      "loss": 2.9557,
      "step": 224000
    },
    {
      "epoch": 16.51,
      "learning_rate": 5.6666666666666664e-05,
      "loss": 2.952,
      "step": 224500
    },
    {
      "epoch": 16.55,
      "learning_rate": 5.555555555555555e-05,
      "loss": 2.9513,
      "step": 225000
    },
    {
      "epoch": 16.58,
      "learning_rate": 5.444444444444444e-05,
      "loss": 2.9493,
      "step": 225500
    },
    {
      "epoch": 16.62,
      "learning_rate": 5.333333333333334e-05,
      "loss": 2.9475,
      "step": 226000
    },
    {
      "epoch": 16.66,
      "learning_rate": 5.222222222222223e-05,
      "loss": 2.9513,
      "step": 226500
    },
    {
      "epoch": 16.69,
      "learning_rate": 5.1111111111111115e-05,
      "loss": 2.9525,
      "step": 227000
    },
    {
      "epoch": 16.73,
      "learning_rate": 5e-05,
      "loss": 2.9538,
      "step": 227500
    },
    {
      "epoch": 16.77,
      "learning_rate": 4.888888888888889e-05,
      "loss": 2.9516,
      "step": 228000
    },
    {
      "epoch": 16.8,
      "learning_rate": 4.7777777777777784e-05,
      "loss": 2.9537,
      "step": 228500
    },
    {
      "epoch": 16.84,
      "learning_rate": 4.666666666666667e-05,
      "loss": 2.9512,
      "step": 229000
    },
    {
      "epoch": 16.88,
      "learning_rate": 4.555555555555556e-05,
      "loss": 2.9518,
      "step": 229500
    },
    {
      "epoch": 16.91,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 2.9498,
      "step": 230000
    },
    {
      "epoch": 16.95,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 2.9512,
      "step": 230500
    },
    {
      "epoch": 16.99,
      "learning_rate": 4.222222222222222e-05,
      "loss": 2.948,
      "step": 231000
    },
    {
      "epoch": 17.02,
      "learning_rate": 4.1111111111111116e-05,
      "loss": 2.9498,
      "step": 231500
    },
    {
      "epoch": 17.06,
      "learning_rate": 4e-05,
      "loss": 2.9466,
      "step": 232000
    },
    {
      "epoch": 17.1,
      "learning_rate": 3.888888888888889e-05,
      "loss": 2.9452,
      "step": 232500
    },
    {
      "epoch": 17.13,
      "learning_rate": 3.777777777777778e-05,
      "loss": 2.942,
      "step": 233000
    },
    {
      "epoch": 17.17,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 2.9463,
      "step": 233500
    },
    {
      "epoch": 17.21,
      "learning_rate": 3.555555555555555e-05,
      "loss": 2.9451,
      "step": 234000
    },
    {
      "epoch": 17.24,
      "learning_rate": 3.444444444444445e-05,
      "loss": 2.9504,
      "step": 234500
    },
    {
      "epoch": 17.28,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.9457,
      "step": 235000
    },
    {
      "epoch": 17.32,
      "learning_rate": 3.222222222222222e-05,
      "loss": 2.9466,
      "step": 235500
    },
    {
      "epoch": 17.35,
      "learning_rate": 3.111111111111111e-05,
      "loss": 2.9444,
      "step": 236000
    },
    {
      "epoch": 17.39,
      "learning_rate": 3e-05,
      "loss": 2.9461,
      "step": 236500
    },
    {
      "epoch": 17.43,
      "learning_rate": 2.8888888888888888e-05,
      "loss": 2.9427,
      "step": 237000
    },
    {
      "epoch": 17.46,
      "learning_rate": 2.7777777777777776e-05,
      "loss": 2.9454,
      "step": 237500
    },
    {
      "epoch": 17.5,
      "learning_rate": 2.666666666666667e-05,
      "loss": 2.9451,
      "step": 238000
    },
    {
      "epoch": 17.54,
      "learning_rate": 2.5555555555555557e-05,
      "loss": 2.9455,
      "step": 238500
    },
    {
      "epoch": 17.57,
      "learning_rate": 2.4444444444444445e-05,
      "loss": 2.943,
      "step": 239000
    },
    {
      "epoch": 17.61,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 2.9441,
      "step": 239500
    },
    {
      "epoch": 17.65,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 2.9429,
      "step": 240000
    },
    {
      "epoch": 17.69,
      "learning_rate": 2.111111111111111e-05,
      "loss": 2.9447,
      "step": 240500
    },
    {
      "epoch": 17.72,
      "learning_rate": 2e-05,
      "loss": 2.9467,
      "step": 241000
    },
    {
      "epoch": 17.76,
      "learning_rate": 1.888888888888889e-05,
      "loss": 2.9462,
      "step": 241500
    },
    {
      "epoch": 17.8,
      "learning_rate": 1.7777777777777777e-05,
      "loss": 2.9446,
      "step": 242000
    },
    {
      "epoch": 17.83,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.9447,
      "step": 242500
    },
    {
      "epoch": 17.87,
      "learning_rate": 1.5555555555555555e-05,
      "loss": 2.9431,
      "step": 243000
    },
    {
      "epoch": 17.91,
      "learning_rate": 1.4444444444444444e-05,
      "loss": 2.9426,
      "step": 243500
    },
    {
      "epoch": 17.94,
      "learning_rate": 1.3333333333333335e-05,
      "loss": 2.9422,
      "step": 244000
    },
    {
      "epoch": 17.98,
      "learning_rate": 1.2222222222222222e-05,
      "loss": 2.9461,
      "step": 244500
    },
    {
      "epoch": 18.02,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 2.9446,
      "step": 245000
    },
    {
      "epoch": 18.05,
      "learning_rate": 1e-05,
      "loss": 2.9393,
      "step": 245500
    },
    {
      "epoch": 18.09,
      "learning_rate": 8.888888888888888e-06,
      "loss": 2.9413,
      "step": 246000
    },
    {
      "epoch": 18.13,
      "learning_rate": 7.777777777777777e-06,
      "loss": 2.9401,
      "step": 246500
    },
    {
      "epoch": 18.16,
      "learning_rate": 6.6666666666666675e-06,
      "loss": 2.9388,
      "step": 247000
    },
    {
      "epoch": 18.2,
      "learning_rate": 5.555555555555556e-06,
      "loss": 2.9385,
      "step": 247500
    },
    {
      "epoch": 18.24,
      "learning_rate": 4.444444444444444e-06,
      "loss": 2.9404,
      "step": 248000
    },
    {
      "epoch": 18.27,
      "learning_rate": 3.3333333333333337e-06,
      "loss": 2.9399,
      "step": 248500
    },
    {
      "epoch": 18.31,
      "learning_rate": 2.222222222222222e-06,
      "loss": 2.9377,
      "step": 249000
    },
    {
      "epoch": 18.35,
      "learning_rate": 1.111111111111111e-06,
      "loss": 2.9394,
      "step": 249500
    },
    {
      "epoch": 18.38,
      "learning_rate": 0.0,
      "loss": 2.9388,
      "step": 250000
    },
    {
      "epoch": 18.38,
      "step": 250000,
      "total_flos": 4.180898268315648e+18,
      "train_loss": 3.104699864746094,
      "train_runtime": 449655.1545,
      "train_samples_per_second": 142.331,
      "train_steps_per_second": 0.556
    }
  ],
  "max_steps": 250000,
  "num_train_epochs": 19,
  "total_flos": 4.180898268315648e+18,
  "trial_name": null,
  "trial_params": null
}