{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004,
      "grad_norm": 42.0,
      "learning_rate": 8.000000000000001e-07,
      "loss": 16.1356,
      "step": 10
    },
    {
      "epoch": 0.008,
      "grad_norm": 39.25,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 16.1383,
      "step": 20
    },
    {
      "epoch": 0.012,
      "grad_norm": 40.25,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 15.4331,
      "step": 30
    },
    {
      "epoch": 0.016,
      "grad_norm": 34.75,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 16.3866,
      "step": 40
    },
    {
      "epoch": 0.02,
      "grad_norm": 33.25,
      "learning_rate": 4.000000000000001e-06,
      "loss": 16.0455,
      "step": 50
    },
    {
      "epoch": 0.024,
      "grad_norm": 26.875,
      "learning_rate": 4.800000000000001e-06,
      "loss": 16.2563,
      "step": 60
    },
    {
      "epoch": 0.028,
      "grad_norm": 24.25,
      "learning_rate": 5.600000000000001e-06,
      "loss": 16.1983,
      "step": 70
    },
    {
      "epoch": 0.032,
      "grad_norm": 20.625,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 15.3843,
      "step": 80
    },
    {
      "epoch": 0.036,
      "grad_norm": 23.25,
      "learning_rate": 7.2000000000000005e-06,
      "loss": 15.5479,
      "step": 90
    },
    {
      "epoch": 0.04,
      "grad_norm": 20.75,
      "learning_rate": 8.000000000000001e-06,
      "loss": 15.0409,
      "step": 100
    },
    {
      "epoch": 0.044,
      "grad_norm": 25.5,
      "learning_rate": 8.8e-06,
      "loss": 15.0215,
      "step": 110
    },
    {
      "epoch": 0.048,
      "grad_norm": 24.625,
      "learning_rate": 9.600000000000001e-06,
      "loss": 14.8879,
      "step": 120
    },
    {
      "epoch": 0.052,
      "grad_norm": 26.625,
      "learning_rate": 1.04e-05,
      "loss": 14.597,
      "step": 130
    },
    {
      "epoch": 0.056,
      "grad_norm": 21.25,
      "learning_rate": 1.1200000000000001e-05,
      "loss": 15.0546,
      "step": 140
    },
    {
      "epoch": 0.06,
      "grad_norm": 25.875,
      "learning_rate": 1.2e-05,
      "loss": 14.8386,
      "step": 150
    },
    {
      "epoch": 0.064,
      "grad_norm": 25.875,
      "learning_rate": 1.2800000000000001e-05,
      "loss": 14.5003,
      "step": 160
    },
    {
      "epoch": 0.068,
      "grad_norm": 31.375,
      "learning_rate": 1.3600000000000002e-05,
      "loss": 15.1483,
      "step": 170
    },
    {
      "epoch": 0.072,
      "grad_norm": 25.25,
      "learning_rate": 1.4400000000000001e-05,
      "loss": 14.6315,
      "step": 180
    },
    {
      "epoch": 0.076,
      "grad_norm": 35.25,
      "learning_rate": 1.5200000000000002e-05,
      "loss": 13.9271,
      "step": 190
    },
    {
      "epoch": 0.08,
      "grad_norm": 28.5,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 14.139,
      "step": 200
    },
    {
      "epoch": 0.084,
      "grad_norm": 22.25,
      "learning_rate": 1.6800000000000002e-05,
      "loss": 14.9061,
      "step": 210
    },
    {
      "epoch": 0.088,
      "grad_norm": 37.0,
      "learning_rate": 1.76e-05,
      "loss": 14.4556,
      "step": 220
    },
    {
      "epoch": 0.092,
      "grad_norm": 32.25,
      "learning_rate": 1.8400000000000003e-05,
      "loss": 14.2981,
      "step": 230
    },
    {
      "epoch": 0.096,
      "grad_norm": 41.25,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 14.9684,
      "step": 240
    },
    {
      "epoch": 0.1,
      "grad_norm": 33.75,
      "learning_rate": 2e-05,
      "loss": 14.6508,
      "step": 250
    },
    {
      "epoch": 0.104,
      "grad_norm": 33.25,
      "learning_rate": 1.9999025240093045e-05,
      "loss": 14.3454,
      "step": 260
    },
    {
      "epoch": 0.108,
      "grad_norm": 28.375,
      "learning_rate": 1.9996101150403543e-05,
      "loss": 14.1579,
      "step": 270
    },
    {
      "epoch": 0.112,
      "grad_norm": 31.875,
      "learning_rate": 1.9991228300988586e-05,
      "loss": 14.1456,
      "step": 280
    },
    {
      "epoch": 0.116,
      "grad_norm": 33.0,
      "learning_rate": 1.9984407641819812e-05,
      "loss": 14.0944,
      "step": 290
    },
    {
      "epoch": 0.12,
      "grad_norm": 31.0,
      "learning_rate": 1.9975640502598243e-05,
      "loss": 14.7894,
      "step": 300
    },
    {
      "epoch": 0.124,
      "grad_norm": 26.75,
      "learning_rate": 1.9964928592495046e-05,
      "loss": 14.2806,
      "step": 310
    },
    {
      "epoch": 0.128,
      "grad_norm": 36.0,
      "learning_rate": 1.9952273999818312e-05,
      "loss": 14.3856,
      "step": 320
    },
    {
      "epoch": 0.132,
      "grad_norm": 24.5,
      "learning_rate": 1.9937679191605964e-05,
      "loss": 14.24,
      "step": 330
    },
    {
      "epoch": 0.136,
      "grad_norm": 30.625,
      "learning_rate": 1.9921147013144782e-05,
      "loss": 13.9409,
      "step": 340
    },
    {
      "epoch": 0.14,
      "grad_norm": 28.0,
      "learning_rate": 1.9902680687415704e-05,
      "loss": 13.7884,
      "step": 350
    },
    {
      "epoch": 0.144,
      "grad_norm": 30.0,
      "learning_rate": 1.988228381446553e-05,
      "loss": 13.975,
      "step": 360
    },
    {
      "epoch": 0.148,
      "grad_norm": 50.0,
      "learning_rate": 1.985996037070505e-05,
      "loss": 13.902,
      "step": 370
    },
    {
      "epoch": 0.152,
      "grad_norm": 33.25,
      "learning_rate": 1.983571470813386e-05,
      "loss": 14.394,
      "step": 380
    },
    {
      "epoch": 0.156,
      "grad_norm": 30.25,
      "learning_rate": 1.9809551553491918e-05,
      "loss": 14.2122,
      "step": 390
    },
    {
      "epoch": 0.16,
      "grad_norm": 33.0,
      "learning_rate": 1.9781476007338058e-05,
      "loss": 14.048,
      "step": 400
    },
    {
      "epoch": 0.164,
      "grad_norm": 27.75,
      "learning_rate": 1.9751493543055634e-05,
      "loss": 14.3168,
      "step": 410
    },
    {
      "epoch": 0.168,
      "grad_norm": 25.0,
      "learning_rate": 1.9719610005785466e-05,
      "loss": 14.0735,
      "step": 420
    },
    {
      "epoch": 0.172,
      "grad_norm": 25.5,
      "learning_rate": 1.9685831611286312e-05,
      "loss": 14.1113,
      "step": 430
    },
    {
      "epoch": 0.176,
      "grad_norm": 28.375,
      "learning_rate": 1.9650164944723116e-05,
      "loss": 14.3061,
      "step": 440
    },
    {
      "epoch": 0.18,
      "grad_norm": 32.75,
      "learning_rate": 1.961261695938319e-05,
      "loss": 13.619,
      "step": 450
    },
    {
      "epoch": 0.184,
      "grad_norm": 31.25,
      "learning_rate": 1.9573194975320672e-05,
      "loss": 14.4487,
      "step": 460
    },
    {
      "epoch": 0.188,
      "grad_norm": 29.0,
      "learning_rate": 1.9531906677929472e-05,
      "loss": 13.936,
      "step": 470
    },
    {
      "epoch": 0.192,
      "grad_norm": 24.0,
      "learning_rate": 1.9488760116444966e-05,
      "loss": 14.0659,
      "step": 480
    },
    {
      "epoch": 0.196,
      "grad_norm": 26.625,
      "learning_rate": 1.944376370237481e-05,
      "loss": 13.7547,
      "step": 490
    },
    {
      "epoch": 0.2,
      "grad_norm": 28.0,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 13.8495,
      "step": 500
    },
    {
      "epoch": 0.204,
      "grad_norm": 26.75,
      "learning_rate": 1.9348256763960146e-05,
      "loss": 13.625,
      "step": 510
    },
    {
      "epoch": 0.208,
      "grad_norm": 24.875,
      "learning_rate": 1.9297764858882516e-05,
      "loss": 13.6376,
      "step": 520
    },
    {
      "epoch": 0.212,
      "grad_norm": 25.625,
      "learning_rate": 1.9245460336123136e-05,
      "loss": 13.6954,
      "step": 530
    },
    {
      "epoch": 0.216,
      "grad_norm": 26.875,
      "learning_rate": 1.9191353392552346e-05,
      "loss": 13.8046,
      "step": 540
    },
    {
      "epoch": 0.22,
      "grad_norm": 22.5,
      "learning_rate": 1.913545457642601e-05,
      "loss": 13.4518,
      "step": 550
    },
    {
      "epoch": 0.224,
      "grad_norm": 26.125,
      "learning_rate": 1.907777478532909e-05,
      "loss": 13.6853,
      "step": 560
    },
    {
      "epoch": 0.228,
      "grad_norm": 28.5,
      "learning_rate": 1.901832526405114e-05,
      "loss": 13.5204,
      "step": 570
    },
    {
      "epoch": 0.232,
      "grad_norm": 24.375,
      "learning_rate": 1.895711760239413e-05,
      "loss": 13.5718,
      "step": 580
    },
    {
      "epoch": 0.236,
      "grad_norm": 28.375,
      "learning_rate": 1.889416373291298e-05,
      "loss": 13.2873,
      "step": 590
    },
    {
      "epoch": 0.24,
      "grad_norm": 26.75,
      "learning_rate": 1.8829475928589272e-05,
      "loss": 13.448,
      "step": 600
    },
    {
      "epoch": 0.244,
      "grad_norm": 21.375,
      "learning_rate": 1.8763066800438638e-05,
      "loss": 12.6493,
      "step": 610
    },
    {
      "epoch": 0.248,
      "grad_norm": 26.125,
      "learning_rate": 1.869494929505219e-05,
      "loss": 12.9608,
      "step": 620
    },
    {
      "epoch": 0.252,
      "grad_norm": 28.5,
      "learning_rate": 1.8625136692072577e-05,
      "loss": 12.5389,
      "step": 630
    },
    {
      "epoch": 0.256,
      "grad_norm": 25.0,
      "learning_rate": 1.855364260160507e-05,
      "loss": 13.3493,
      "step": 640
    },
    {
      "epoch": 0.26,
      "grad_norm": 24.25,
      "learning_rate": 1.848048096156426e-05,
      "loss": 13.155,
      "step": 650
    },
    {
      "epoch": 0.264,
      "grad_norm": 34.5,
      "learning_rate": 1.8405666034956842e-05,
      "loss": 13.3605,
      "step": 660
    },
    {
      "epoch": 0.268,
      "grad_norm": 25.0,
      "learning_rate": 1.8329212407100996e-05,
      "loss": 13.2862,
      "step": 670
    },
    {
      "epoch": 0.272,
      "grad_norm": 24.125,
      "learning_rate": 1.8251134982782952e-05,
      "loss": 13.1185,
      "step": 680
    },
    {
      "epoch": 0.276,
      "grad_norm": 22.625,
      "learning_rate": 1.8171448983351284e-05,
      "loss": 12.7952,
      "step": 690
    },
    {
      "epoch": 0.28,
      "grad_norm": 35.0,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 13.311,
      "step": 700
    },
    {
      "epoch": 0.284,
      "grad_norm": 34.0,
      "learning_rate": 1.8007313709487334e-05,
      "loss": 13.0198,
      "step": 710
    },
    {
      "epoch": 0.288,
      "grad_norm": 28.25,
      "learning_rate": 1.792289643355191e-05,
      "loss": 12.7704,
      "step": 720
    },
    {
      "epoch": 0.292,
      "grad_norm": 23.125,
      "learning_rate": 1.78369345732584e-05,
      "loss": 12.857,
      "step": 730
    },
    {
      "epoch": 0.296,
      "grad_norm": 23.5,
      "learning_rate": 1.7749444887041797e-05,
      "loss": 13.166,
      "step": 740
    },
    {
      "epoch": 0.3,
      "grad_norm": 22.875,
      "learning_rate": 1.766044443118978e-05,
      "loss": 12.8836,
      "step": 750
    },
    {
      "epoch": 0.304,
      "grad_norm": 28.75,
      "learning_rate": 1.7569950556517566e-05,
      "loss": 12.9149,
      "step": 760
    },
    {
      "epoch": 0.308,
      "grad_norm": 28.0,
      "learning_rate": 1.747798090498532e-05,
      "loss": 12.6713,
      "step": 770
    },
    {
      "epoch": 0.312,
      "grad_norm": 23.625,
      "learning_rate": 1.7384553406258842e-05,
      "loss": 12.755,
      "step": 780
    },
    {
      "epoch": 0.316,
      "grad_norm": 23.375,
      "learning_rate": 1.7289686274214116e-05,
      "loss": 12.7228,
      "step": 790
    },
    {
      "epoch": 0.32,
      "grad_norm": 23.625,
      "learning_rate": 1.7193398003386514e-05,
      "loss": 12.8029,
      "step": 800
    },
    {
      "epoch": 0.324,
      "grad_norm": 29.75,
      "learning_rate": 1.709570736536521e-05,
      "loss": 13.0532,
      "step": 810
    },
    {
      "epoch": 0.328,
      "grad_norm": 35.0,
      "learning_rate": 1.6996633405133656e-05,
      "loss": 11.925,
      "step": 820
    },
    {
      "epoch": 0.332,
      "grad_norm": 29.25,
      "learning_rate": 1.68961954373567e-05,
      "loss": 13.2527,
      "step": 830
    },
    {
      "epoch": 0.336,
      "grad_norm": 27.625,
      "learning_rate": 1.6794413042615168e-05,
      "loss": 12.9308,
      "step": 840
    },
    {
      "epoch": 0.34,
      "grad_norm": 26.25,
      "learning_rate": 1.6691306063588583e-05,
      "loss": 12.9368,
      "step": 850
    },
    {
      "epoch": 0.344,
      "grad_norm": 21.125,
      "learning_rate": 1.6586894601186804e-05,
      "loss": 12.3981,
      "step": 860
    },
    {
      "epoch": 0.348,
      "grad_norm": 21.125,
      "learning_rate": 1.6481199010631312e-05,
      "loss": 12.7891,
      "step": 870
    },
    {
      "epoch": 0.352,
      "grad_norm": 19.75,
      "learning_rate": 1.63742398974869e-05,
      "loss": 12.5167,
      "step": 880
    },
    {
      "epoch": 0.356,
      "grad_norm": 26.875,
      "learning_rate": 1.6266038113644605e-05,
      "loss": 12.6286,
      "step": 890
    },
    {
      "epoch": 0.36,
      "grad_norm": 19.875,
      "learning_rate": 1.6156614753256583e-05,
      "loss": 12.5022,
      "step": 900
    },
    {
      "epoch": 0.364,
      "grad_norm": 22.625,
      "learning_rate": 1.6045991148623752e-05,
      "loss": 12.3728,
      "step": 910
    },
    {
      "epoch": 0.368,
      "grad_norm": 29.0,
      "learning_rate": 1.5934188866037017e-05,
      "loss": 12.5975,
      "step": 920
    },
    {
      "epoch": 0.372,
      "grad_norm": 24.0,
      "learning_rate": 1.5821229701572897e-05,
      "loss": 12.543,
      "step": 930
    },
    {
      "epoch": 0.376,
      "grad_norm": 22.875,
      "learning_rate": 1.570713567684432e-05,
      "loss": 12.4243,
      "step": 940
    },
    {
      "epoch": 0.38,
      "grad_norm": 24.0,
      "learning_rate": 1.5591929034707468e-05,
      "loss": 12.5414,
      "step": 950
    },
    {
      "epoch": 0.384,
      "grad_norm": 22.125,
      "learning_rate": 1.5475632234925505e-05,
      "loss": 12.7494,
      "step": 960
    },
    {
      "epoch": 0.388,
      "grad_norm": 22.375,
      "learning_rate": 1.5358267949789968e-05,
      "loss": 12.5341,
      "step": 970
    },
    {
      "epoch": 0.392,
      "grad_norm": 19.875,
      "learning_rate": 1.5239859059700794e-05,
      "loss": 12.2114,
      "step": 980
    },
    {
      "epoch": 0.396,
      "grad_norm": 24.125,
      "learning_rate": 1.5120428648705716e-05,
      "loss": 12.5967,
      "step": 990
    },
    {
      "epoch": 0.4,
      "grad_norm": 27.375,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 12.4666,
      "step": 1000
    },
    {
      "epoch": 0.404,
      "grad_norm": 19.875,
      "learning_rate": 1.4878596591387329e-05,
      "loss": 12.467,
      "step": 1010
    },
    {
      "epoch": 0.408,
      "grad_norm": 20.625,
      "learning_rate": 1.4756242090702756e-05,
      "loss": 12.5511,
      "step": 1020
    },
    {
      "epoch": 0.412,
      "grad_norm": 20.5,
      "learning_rate": 1.463296035119862e-05,
      "loss": 12.6373,
      "step": 1030
    },
    {
      "epoch": 0.416,
      "grad_norm": 17.25,
      "learning_rate": 1.4508775406894308e-05,
      "loss": 12.2578,
      "step": 1040
    },
    {
      "epoch": 0.42,
      "grad_norm": 18.875,
      "learning_rate": 1.4383711467890776e-05,
      "loss": 12.1735,
      "step": 1050
    },
    {
      "epoch": 0.424,
      "grad_norm": 44.5,
      "learning_rate": 1.4257792915650728e-05,
      "loss": 12.3356,
      "step": 1060
    },
    {
      "epoch": 0.428,
      "grad_norm": 20.875,
      "learning_rate": 1.413104429824542e-05,
      "loss": 12.2474,
      "step": 1070
    },
    {
      "epoch": 0.432,
      "grad_norm": 35.5,
      "learning_rate": 1.4003490325568953e-05,
      "loss": 12.1413,
      "step": 1080
    },
    {
      "epoch": 0.436,
      "grad_norm": 17.125,
      "learning_rate": 1.3875155864521031e-05,
      "loss": 12.3009,
      "step": 1090
    },
    {
      "epoch": 0.44,
      "grad_norm": 16.125,
      "learning_rate": 1.3746065934159123e-05,
      "loss": 12.5468,
      "step": 1100
    },
    {
      "epoch": 0.444,
      "grad_norm": 17.125,
      "learning_rate": 1.3616245700820922e-05,
      "loss": 12.4364,
      "step": 1110
    },
    {
      "epoch": 0.448,
      "grad_norm": 18.125,
      "learning_rate": 1.3485720473218153e-05,
      "loss": 12.5854,
      "step": 1120
    },
    {
      "epoch": 0.452,
      "grad_norm": 18.75,
      "learning_rate": 1.3354515697502552e-05,
      "loss": 12.1944,
      "step": 1130
    },
    {
      "epoch": 0.456,
      "grad_norm": 28.5,
      "learning_rate": 1.3222656952305113e-05,
      "loss": 11.8036,
      "step": 1140
    },
    {
      "epoch": 0.46,
      "grad_norm": 19.25,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 12.5166,
      "step": 1150
    },
    {
      "epoch": 0.464,
      "grad_norm": 18.125,
      "learning_rate": 1.2957080500440469e-05,
      "loss": 12.3671,
      "step": 1160
    },
    {
      "epoch": 0.468,
      "grad_norm": 20.25,
      "learning_rate": 1.2823414568428767e-05,
      "loss": 12.1979,
      "step": 1170
    },
    {
      "epoch": 0.472,
      "grad_norm": 19.25,
      "learning_rate": 1.2689198206152657e-05,
      "loss": 11.9643,
      "step": 1180
    },
    {
      "epoch": 0.476,
      "grad_norm": 20.125,
      "learning_rate": 1.2554457579357906e-05,
      "loss": 12.1654,
      "step": 1190
    },
    {
      "epoch": 0.48,
      "grad_norm": 23.375,
      "learning_rate": 1.2419218955996677e-05,
      "loss": 12.1331,
      "step": 1200
    },
    {
      "epoch": 0.484,
      "grad_norm": 22.0,
      "learning_rate": 1.2283508701106559e-05,
      "loss": 11.7672,
      "step": 1210
    },
    {
      "epoch": 0.488,
      "grad_norm": 20.125,
      "learning_rate": 1.2147353271670634e-05,
      "loss": 12.0773,
      "step": 1220
    },
    {
      "epoch": 0.492,
      "grad_norm": 34.75,
      "learning_rate": 1.2010779211459649e-05,
      "loss": 12.4496,
      "step": 1230
    },
    {
      "epoch": 0.496,
      "grad_norm": 18.0,
      "learning_rate": 1.187381314585725e-05,
      "loss": 12.0919,
      "step": 1240
    },
    {
      "epoch": 0.5,
      "grad_norm": 19.125,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 12.2576,
      "step": 1250
    },
    {
      "epoch": 0.504,
      "grad_norm": 17.25,
      "learning_rate": 1.159881187691835e-05,
      "loss": 12.5035,
      "step": 1260
    },
    {
      "epoch": 0.508,
      "grad_norm": 17.875,
      "learning_rate": 1.1460830285624119e-05,
      "loss": 12.186,
      "step": 1270
    },
    {
      "epoch": 0.512,
      "grad_norm": 20.0,
      "learning_rate": 1.1322563902571227e-05,
      "loss": 12.2628,
      "step": 1280
    },
    {
      "epoch": 0.516,
      "grad_norm": 23.125,
      "learning_rate": 1.1184039683065014e-05,
      "loss": 12.1192,
      "step": 1290
    },
    {
      "epoch": 0.52,
      "grad_norm": 19.5,
      "learning_rate": 1.1045284632676535e-05,
      "loss": 11.4056,
      "step": 1300
    },
    {
      "epoch": 0.524,
      "grad_norm": 18.375,
      "learning_rate": 1.0906325801977804e-05,
      "loss": 11.909,
      "step": 1310
    },
    {
      "epoch": 0.528,
      "grad_norm": 18.375,
      "learning_rate": 1.0767190281268187e-05,
      "loss": 11.9674,
      "step": 1320
    },
    {
      "epoch": 0.532,
      "grad_norm": 22.0,
      "learning_rate": 1.0627905195293135e-05,
      "loss": 11.8993,
      "step": 1330
    },
    {
      "epoch": 0.536,
      "grad_norm": 17.5,
      "learning_rate": 1.0488497697956134e-05,
      "loss": 12.0954,
      "step": 1340
    },
    {
      "epoch": 0.54,
      "grad_norm": 17.125,
      "learning_rate": 1.0348994967025012e-05,
      "loss": 11.6242,
      "step": 1350
    },
    {
      "epoch": 0.544,
      "grad_norm": 17.375,
      "learning_rate": 1.0209424198833571e-05,
      "loss": 12.0752,
      "step": 1360
    },
    {
      "epoch": 0.548,
      "grad_norm": 20.375,
      "learning_rate": 1.0069812602979617e-05,
      "loss": 11.6246,
      "step": 1370
    },
    {
      "epoch": 0.552,
      "grad_norm": 19.125,
      "learning_rate": 9.930187397020385e-06,
      "loss": 12.0839,
      "step": 1380
    },
    {
      "epoch": 0.556,
      "grad_norm": 20.125,
      "learning_rate": 9.790575801166432e-06,
      "loss": 12.2101,
      "step": 1390
    },
    {
      "epoch": 0.56,
      "grad_norm": 18.875,
      "learning_rate": 9.651005032974994e-06,
      "loss": 11.9135,
      "step": 1400
    },
    {
      "epoch": 0.564,
      "grad_norm": 20.25,
      "learning_rate": 9.511502302043867e-06,
      "loss": 12.0097,
      "step": 1410
    },
    {
      "epoch": 0.568,
      "grad_norm": 24.875,
      "learning_rate": 9.372094804706867e-06,
      "loss": 11.8345,
      "step": 1420
    },
    {
      "epoch": 0.572,
      "grad_norm": 18.625,
      "learning_rate": 9.232809718731815e-06,
      "loss": 11.9064,
      "step": 1430
    },
    {
      "epoch": 0.576,
      "grad_norm": 18.75,
      "learning_rate": 9.093674198022201e-06,
      "loss": 11.9602,
      "step": 1440
    },
    {
      "epoch": 0.58,
      "grad_norm": 18.125,
      "learning_rate": 8.954715367323468e-06,
      "loss": 11.8622,
      "step": 1450
    },
    {
      "epoch": 0.584,
      "grad_norm": 15.8125,
      "learning_rate": 8.815960316934991e-06,
      "loss": 11.7525,
      "step": 1460
    },
    {
      "epoch": 0.588,
      "grad_norm": 19.625,
      "learning_rate": 8.677436097428775e-06,
      "loss": 11.6101,
      "step": 1470
    },
    {
      "epoch": 0.592,
      "grad_norm": 16.125,
      "learning_rate": 8.539169714375885e-06,
      "loss": 11.9193,
      "step": 1480
    },
    {
      "epoch": 0.596,
      "grad_norm": 17.375,
      "learning_rate": 8.401188123081653e-06,
      "loss": 11.8304,
      "step": 1490
    },
    {
      "epoch": 0.6,
      "grad_norm": 24.375,
      "learning_rate": 8.263518223330698e-06,
      "loss": 11.6055,
      "step": 1500
    },
    {
      "epoch": 0.604,
      "grad_norm": 18.375,
      "learning_rate": 8.126186854142752e-06,
      "loss": 11.714,
      "step": 1510
    },
    {
      "epoch": 0.608,
      "grad_norm": 16.375,
      "learning_rate": 7.989220788540356e-06,
      "loss": 12.3469,
      "step": 1520
    },
    {
      "epoch": 0.612,
      "grad_norm": 22.5,
      "learning_rate": 7.852646728329368e-06,
      "loss": 11.4557,
      "step": 1530
    },
    {
      "epoch": 0.616,
      "grad_norm": 18.0,
      "learning_rate": 7.716491298893443e-06,
      "loss": 11.6784,
      "step": 1540
    },
    {
      "epoch": 0.62,
      "grad_norm": 16.75,
      "learning_rate": 7.580781044003324e-06,
      "loss": 11.7863,
      "step": 1550
    },
    {
      "epoch": 0.624,
      "grad_norm": 16.25,
      "learning_rate": 7.445542420642097e-06,
      "loss": 11.4727,
      "step": 1560
    },
    {
      "epoch": 0.628,
      "grad_norm": 14.125,
      "learning_rate": 7.310801793847344e-06,
      "loss": 11.6379,
      "step": 1570
    },
    {
      "epoch": 0.632,
      "grad_norm": 18.0,
      "learning_rate": 7.176585431571235e-06,
      "loss": 11.9072,
      "step": 1580
    },
    {
      "epoch": 0.636,
      "grad_norm": 17.75,
      "learning_rate": 7.042919499559538e-06,
      "loss": 11.67,
      "step": 1590
    },
    {
      "epoch": 0.64,
      "grad_norm": 15.5625,
      "learning_rate": 6.909830056250527e-06,
      "loss": 12.1847,
      "step": 1600
    },
    {
      "epoch": 0.644,
      "grad_norm": 17.125,
      "learning_rate": 6.777343047694891e-06,
      "loss": 11.534,
      "step": 1610
    },
    {
      "epoch": 0.648,
      "grad_norm": 13.75,
      "learning_rate": 6.645484302497452e-06,
      "loss": 11.8197,
      "step": 1620
    },
    {
      "epoch": 0.652,
      "grad_norm": 14.75,
      "learning_rate": 6.5142795267818505e-06,
      "loss": 11.2645,
      "step": 1630
    },
    {
      "epoch": 0.656,
      "grad_norm": 20.75,
      "learning_rate": 6.383754299179079e-06,
      "loss": 11.805,
      "step": 1640
    },
    {
      "epoch": 0.66,
      "grad_norm": 14.5,
      "learning_rate": 6.25393406584088e-06,
      "loss": 11.5875,
      "step": 1650
    },
    {
      "epoch": 0.664,
      "grad_norm": 15.3125,
      "learning_rate": 6.124844135478971e-06,
      "loss": 11.4167,
      "step": 1660
    },
    {
      "epoch": 0.668,
      "grad_norm": 21.5,
      "learning_rate": 5.996509674431053e-06,
      "loss": 11.6036,
      "step": 1670
    },
    {
      "epoch": 0.672,
      "grad_norm": 16.75,
      "learning_rate": 5.868955701754584e-06,
      "loss": 12.2395,
      "step": 1680
    },
    {
      "epoch": 0.676,
      "grad_norm": 13.0,
      "learning_rate": 5.742207084349274e-06,
      "loss": 11.8518,
      "step": 1690
    },
    {
      "epoch": 0.68,
      "grad_norm": 24.125,
      "learning_rate": 5.616288532109225e-06,
      "loss": 11.8566,
      "step": 1700
    },
    {
      "epoch": 0.684,
      "grad_norm": 14.4375,
      "learning_rate": 5.491224593105695e-06,
      "loss": 11.4311,
      "step": 1710
    },
    {
      "epoch": 0.688,
      "grad_norm": 13.875,
      "learning_rate": 5.367039648801386e-06,
      "loss": 11.3975,
      "step": 1720
    },
    {
      "epoch": 0.692,
      "grad_norm": 18.5,
      "learning_rate": 5.243757909297247e-06,
      "loss": 11.5973,
      "step": 1730
    },
    {
      "epoch": 0.696,
      "grad_norm": 18.25,
      "learning_rate": 5.121403408612672e-06,
      "loss": 11.7354,
      "step": 1740
    },
    {
      "epoch": 0.7,
      "grad_norm": 15.375,
      "learning_rate": 5.000000000000003e-06,
      "loss": 11.8648,
      "step": 1750
    },
    {
      "epoch": 0.704,
      "grad_norm": 17.75,
      "learning_rate": 4.879571351294287e-06,
      "loss": 11.788,
      "step": 1760
    },
    {
      "epoch": 0.708,
      "grad_norm": 18.75,
      "learning_rate": 4.76014094029921e-06,
      "loss": 11.8052,
      "step": 1770
    },
    {
      "epoch": 0.712,
      "grad_norm": 16.0,
      "learning_rate": 4.641732050210032e-06,
      "loss": 11.473,
      "step": 1780
    },
    {
      "epoch": 0.716,
      "grad_norm": 14.1875,
      "learning_rate": 4.524367765074499e-06,
      "loss": 11.3811,
      "step": 1790
    },
    {
      "epoch": 0.72,
      "grad_norm": 14.4375,
      "learning_rate": 4.408070965292534e-06,
      "loss": 11.2224,
      "step": 1800
    },
    {
      "epoch": 0.724,
      "grad_norm": 19.125,
      "learning_rate": 4.292864323155684e-06,
      "loss": 11.3153,
      "step": 1810
    },
    {
      "epoch": 0.728,
      "grad_norm": 17.0,
      "learning_rate": 4.178770298427107e-06,
      "loss": 11.5543,
      "step": 1820
    },
    {
      "epoch": 0.732,
      "grad_norm": 16.125,
      "learning_rate": 4.065811133962987e-06,
      "loss": 11.6786,
      "step": 1830
    },
    {
      "epoch": 0.736,
      "grad_norm": 17.75,
      "learning_rate": 3.954008851376252e-06,
      "loss": 11.5551,
      "step": 1840
    },
    {
      "epoch": 0.74,
      "grad_norm": 15.75,
      "learning_rate": 3.8433852467434175e-06,
      "loss": 11.4994,
      "step": 1850
    },
    {
      "epoch": 0.744,
      "grad_norm": 15.0,
      "learning_rate": 3.7339618863553983e-06,
      "loss": 11.7822,
      "step": 1860
    },
    {
      "epoch": 0.748,
      "grad_norm": 16.25,
      "learning_rate": 3.625760102513103e-06,
      "loss": 11.7441,
      "step": 1870
    },
    {
      "epoch": 0.752,
      "grad_norm": 16.125,
      "learning_rate": 3.5188009893686916e-06,
      "loss": 11.7047,
      "step": 1880
    },
    {
      "epoch": 0.756,
      "grad_norm": 14.875,
      "learning_rate": 3.4131053988131947e-06,
      "loss": 11.6164,
      "step": 1890
    },
    {
      "epoch": 0.76,
      "grad_norm": 13.875,
      "learning_rate": 3.308693936411421e-06,
      "loss": 11.8134,
      "step": 1900
    },
    {
      "epoch": 0.764,
      "grad_norm": 16.125,
      "learning_rate": 3.2055869573848374e-06,
      "loss": 11.8393,
      "step": 1910
    },
    {
      "epoch": 0.768,
      "grad_norm": 16.875,
      "learning_rate": 3.103804562643302e-06,
      "loss": 12.1629,
      "step": 1920
    },
    {
      "epoch": 0.772,
      "grad_norm": 16.25,
      "learning_rate": 3.003366594866345e-06,
      "loss": 11.5428,
      "step": 1930
    },
    {
      "epoch": 0.776,
      "grad_norm": 15.625,
      "learning_rate": 2.9042926346347932e-06,
      "loss": 11.3288,
      "step": 1940
    },
    {
      "epoch": 0.78,
      "grad_norm": 14.625,
      "learning_rate": 2.8066019966134907e-06,
      "loss": 11.6534,
      "step": 1950
    },
    {
      "epoch": 0.784,
      "grad_norm": 16.125,
      "learning_rate": 2.7103137257858867e-06,
      "loss": 11.4254,
      "step": 1960
    },
    {
      "epoch": 0.788,
      "grad_norm": 17.25,
      "learning_rate": 2.615446593741161e-06,
      "loss": 11.8175,
      "step": 1970
    },
    {
      "epoch": 0.792,
      "grad_norm": 13.8125,
      "learning_rate": 2.522019095014683e-06,
      "loss": 11.6752,
      "step": 1980
    },
    {
      "epoch": 0.796,
      "grad_norm": 18.0,
      "learning_rate": 2.4300494434824373e-06,
      "loss": 11.692,
      "step": 1990
    },
    {
      "epoch": 0.8,
      "grad_norm": 13.5,
      "learning_rate": 2.339555568810221e-06,
      "loss": 11.5219,
      "step": 2000
    },
    {
      "epoch": 0.804,
      "grad_norm": 14.9375,
      "learning_rate": 2.2505551129582047e-06,
      "loss": 11.4298,
      "step": 2010
    },
    {
      "epoch": 0.808,
      "grad_norm": 17.125,
      "learning_rate": 2.163065426741603e-06,
      "loss": 11.5041,
      "step": 2020
    },
    {
      "epoch": 0.812,
      "grad_norm": 15.25,
      "learning_rate": 2.0771035664480944e-06,
      "loss": 11.5272,
      "step": 2030
    },
    {
      "epoch": 0.816,
      "grad_norm": 16.75,
      "learning_rate": 1.9926862905126663e-06,
      "loss": 11.3027,
      "step": 2040
    },
    {
      "epoch": 0.82,
      "grad_norm": 14.875,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 11.6911,
      "step": 2050
    },
    {
      "epoch": 0.824,
      "grad_norm": 13.375,
      "learning_rate": 1.8285510166487154e-06,
      "loss": 11.2328,
      "step": 2060
    },
    {
      "epoch": 0.828,
      "grad_norm": 14.75,
      "learning_rate": 1.7488650172170496e-06,
      "loss": 11.7217,
      "step": 2070
    },
    {
      "epoch": 0.832,
      "grad_norm": 14.0625,
      "learning_rate": 1.6707875928990059e-06,
      "loss": 11.7367,
      "step": 2080
    },
    {
      "epoch": 0.836,
      "grad_norm": 13.5,
      "learning_rate": 1.5943339650431578e-06,
      "loss": 11.7986,
      "step": 2090
    },
    {
      "epoch": 0.84,
      "grad_norm": 17.25,
      "learning_rate": 1.5195190384357405e-06,
      "loss": 11.6699,
      "step": 2100
    },
    {
      "epoch": 0.844,
      "grad_norm": 16.5,
      "learning_rate": 1.446357398394934e-06,
      "loss": 11.8443,
      "step": 2110
    },
    {
      "epoch": 0.848,
      "grad_norm": 14.0,
      "learning_rate": 1.3748633079274254e-06,
      "loss": 11.8157,
      "step": 2120
    },
    {
      "epoch": 0.852,
      "grad_norm": 13.75,
      "learning_rate": 1.30505070494781e-06,
      "loss": 11.8902,
      "step": 2130
    },
    {
      "epoch": 0.856,
      "grad_norm": 13.75,
      "learning_rate": 1.2369331995613664e-06,
      "loss": 12.1189,
      "step": 2140
    },
    {
      "epoch": 0.86,
      "grad_norm": 14.1875,
      "learning_rate": 1.1705240714107301e-06,
      "loss": 11.692,
      "step": 2150
    },
    {
      "epoch": 0.864,
      "grad_norm": 13.625,
      "learning_rate": 1.1058362670870248e-06,
      "loss": 11.5978,
      "step": 2160
    },
    {
      "epoch": 0.868,
      "grad_norm": 13.875,
      "learning_rate": 1.042882397605871e-06,
      "loss": 11.6953,
      "step": 2170
    },
    {
      "epoch": 0.872,
      "grad_norm": 13.4375,
      "learning_rate": 9.816747359488632e-07,
      "loss": 11.4016,
      "step": 2180
    },
    {
      "epoch": 0.876,
      "grad_norm": 16.0,
      "learning_rate": 9.222252146709143e-07,
      "loss": 11.4873,
      "step": 2190
    },
    {
      "epoch": 0.88,
      "grad_norm": 15.625,
      "learning_rate": 8.645454235739903e-07,
      "loss": 11.2464,
      "step": 2200
    },
    {
      "epoch": 0.884,
      "grad_norm": 17.875,
      "learning_rate": 8.086466074476562e-07,
      "loss": 11.8022,
      "step": 2210
    },
    {
      "epoch": 0.888,
      "grad_norm": 12.5,
      "learning_rate": 7.545396638768698e-07,
      "loss": 11.778,
      "step": 2220
    },
    {
      "epoch": 0.892,
      "grad_norm": 16.0,
      "learning_rate": 7.022351411174866e-07,
      "loss": 11.5754,
      "step": 2230
    },
    {
      "epoch": 0.896,
      "grad_norm": 15.0,
      "learning_rate": 6.517432360398556e-07,
      "loss": 11.9485,
      "step": 2240
    },
    {
      "epoch": 0.9,
      "grad_norm": 16.125,
      "learning_rate": 6.030737921409169e-07,
      "loss": 11.8117,
      "step": 2250
    },
    {
      "epoch": 0.904,
      "grad_norm": 17.0,
      "learning_rate": 5.562362976251901e-07,
      "loss": 11.3828,
      "step": 2260
    },
    {
      "epoch": 0.908,
      "grad_norm": 17.25,
      "learning_rate": 5.112398835550348e-07,
      "loss": 11.5017,
      "step": 2270
    },
    {
      "epoch": 0.912,
      "grad_norm": 13.625,
      "learning_rate": 4.6809332207053083e-07,
      "loss": 11.886,
      "step": 2280
    },
    {
      "epoch": 0.916,
      "grad_norm": 16.625,
      "learning_rate": 4.268050246793276e-07,
      "loss": 11.5668,
      "step": 2290
    },
    {
      "epoch": 0.92,
      "grad_norm": 16.875,
      "learning_rate": 3.8738304061681107e-07,
      "loss": 11.5818,
      "step": 2300
    },
    {
      "epoch": 0.924,
      "grad_norm": 15.1875,
      "learning_rate": 3.498350552768859e-07,
      "loss": 11.2704,
      "step": 2310
    },
    {
      "epoch": 0.928,
      "grad_norm": 15.875,
      "learning_rate": 3.1416838871368925e-07,
      "loss": 11.5949,
      "step": 2320
    },
    {
      "epoch": 0.932,
      "grad_norm": 15.5625,
      "learning_rate": 2.8038999421453827e-07,
      "loss": 11.0745,
      "step": 2330
    },
    {
      "epoch": 0.936,
      "grad_norm": 18.375,
      "learning_rate": 2.4850645694436736e-07,
      "loss": 11.762,
      "step": 2340
    },
    {
      "epoch": 0.94,
      "grad_norm": 12.875,
      "learning_rate": 2.1852399266194312e-07,
      "loss": 11.7735,
      "step": 2350
    },
    {
      "epoch": 0.944,
      "grad_norm": 17.75,
      "learning_rate": 1.9044844650808468e-07,
      "loss": 11.578,
      "step": 2360
    },
    {
      "epoch": 0.948,
      "grad_norm": 17.375,
      "learning_rate": 1.6428529186614195e-07,
      "loss": 11.8724,
      "step": 2370
    },
    {
      "epoch": 0.952,
      "grad_norm": 15.5,
      "learning_rate": 1.400396292949513e-07,
      "loss": 11.4199,
      "step": 2380
    },
    {
      "epoch": 0.956,
      "grad_norm": 14.5,
      "learning_rate": 1.1771618553447217e-07,
      "loss": 11.3379,
      "step": 2390
    },
    {
      "epoch": 0.96,
      "grad_norm": 14.5625,
      "learning_rate": 9.731931258429638e-08,
      "loss": 11.7067,
      "step": 2400
    },
    {
      "epoch": 0.964,
      "grad_norm": 16.125,
      "learning_rate": 7.885298685522235e-08,
      "loss": 11.8713,
      "step": 2410
    },
    {
      "epoch": 0.968,
      "grad_norm": 14.5,
      "learning_rate": 6.232080839403631e-08,
      "loss": 10.8336,
      "step": 2420
    },
    {
      "epoch": 0.972,
      "grad_norm": 13.0,
      "learning_rate": 4.772600018168816e-08,
      "loss": 11.7092,
      "step": 2430
    },
    {
      "epoch": 0.976,
      "grad_norm": 15.0625,
      "learning_rate": 3.50714075049563e-08,
      "loss": 11.4174,
      "step": 2440
    },
    {
      "epoch": 0.98,
      "grad_norm": 15.0625,
      "learning_rate": 2.4359497401758026e-08,
      "loss": 11.9475,
      "step": 2450
    },
    {
      "epoch": 0.984,
      "grad_norm": 14.6875,
      "learning_rate": 1.5592358180189782e-08,
      "loss": 11.7502,
      "step": 2460
    },
    {
      "epoch": 0.988,
      "grad_norm": 13.75,
      "learning_rate": 8.771699011416169e-09,
      "loss": 11.3247,
      "step": 2470
    },
    {
      "epoch": 0.992,
      "grad_norm": 14.0625,
      "learning_rate": 3.898849596456477e-09,
      "loss": 11.4935,
      "step": 2480
    },
    {
      "epoch": 0.996,
      "grad_norm": 13.8125,
      "learning_rate": 9.74759906957612e-10,
      "loss": 11.274,
      "step": 2490
    },
    {
      "epoch": 1.0,
      "grad_norm": 14.8125,
      "learning_rate": 0.0,
      "loss": 11.0717,
      "step": 2500
    }
  ],
  "logging_steps": 10,
  "max_steps": 2500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 2500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.4405861564416e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}