{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 12500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016,
      "grad_norm": 5.821266174316406,
      "learning_rate": 7.200000000000001e-08,
      "loss": 0.9155,
      "step": 10
    },
    {
      "epoch": 0.0032,
      "grad_norm": 4.251969814300537,
      "learning_rate": 1.52e-07,
      "loss": 0.9536,
      "step": 20
    },
    {
      "epoch": 0.0048,
      "grad_norm": 5.058816909790039,
      "learning_rate": 2.32e-07,
      "loss": 0.8432,
      "step": 30
    },
    {
      "epoch": 0.0064,
      "grad_norm": 3.77224063873291,
      "learning_rate": 3.12e-07,
      "loss": 0.8303,
      "step": 40
    },
    {
      "epoch": 0.008,
      "grad_norm": 3.0052578449249268,
      "learning_rate": 3.92e-07,
      "loss": 0.8548,
      "step": 50
    },
    {
      "epoch": 0.0096,
      "grad_norm": 2.364137649536133,
      "learning_rate": 4.7200000000000004e-07,
      "loss": 0.886,
      "step": 60
    },
    {
      "epoch": 0.0112,
      "grad_norm": 2.3058550357818604,
      "learning_rate": 5.520000000000001e-07,
      "loss": 0.829,
      "step": 70
    },
    {
      "epoch": 0.0128,
      "grad_norm": 2.606264352798462,
      "learning_rate": 6.320000000000002e-07,
      "loss": 0.7435,
      "step": 80
    },
    {
      "epoch": 0.0144,
      "grad_norm": 2.318427085876465,
      "learning_rate": 7.12e-07,
      "loss": 0.7742,
      "step": 90
    },
    {
      "epoch": 0.016,
      "grad_norm": 2.2760097980499268,
      "learning_rate": 7.920000000000001e-07,
      "loss": 0.748,
      "step": 100
    },
    {
      "epoch": 0.0176,
      "grad_norm": 2.4790289402008057,
      "learning_rate": 8.720000000000001e-07,
      "loss": 0.7503,
      "step": 110
    },
    {
      "epoch": 0.0192,
      "grad_norm": 1.7304102182388306,
      "learning_rate": 9.520000000000002e-07,
      "loss": 0.6767,
      "step": 120
    },
    {
      "epoch": 0.0208,
      "grad_norm": 1.7321223020553589,
      "learning_rate": 1.032e-06,
      "loss": 0.6776,
      "step": 130
    },
    {
      "epoch": 0.0224,
      "grad_norm": 1.9701182842254639,
      "learning_rate": 1.1120000000000001e-06,
      "loss": 0.6816,
      "step": 140
    },
    {
      "epoch": 0.024,
      "grad_norm": 1.7989563941955566,
      "learning_rate": 1.1920000000000002e-06,
      "loss": 0.7141,
      "step": 150
    },
    {
      "epoch": 0.0256,
      "grad_norm": 1.5565420389175415,
      "learning_rate": 1.2720000000000003e-06,
      "loss": 0.6901,
      "step": 160
    },
    {
      "epoch": 0.0272,
      "grad_norm": 1.8955837488174438,
      "learning_rate": 1.352e-06,
      "loss": 0.6244,
      "step": 170
    },
    {
      "epoch": 0.0288,
      "grad_norm": 1.7556990385055542,
      "learning_rate": 1.432e-06,
      "loss": 0.6205,
      "step": 180
    },
    {
      "epoch": 0.0304,
      "grad_norm": 1.8270255327224731,
      "learning_rate": 1.512e-06,
      "loss": 0.604,
      "step": 190
    },
    {
      "epoch": 0.032,
      "grad_norm": 2.064455986022949,
      "learning_rate": 1.5920000000000002e-06,
      "loss": 0.6536,
      "step": 200
    },
    {
      "epoch": 0.0336,
      "grad_norm": 1.8907413482666016,
      "learning_rate": 1.672e-06,
      "loss": 0.6889,
      "step": 210
    },
    {
      "epoch": 0.0352,
      "grad_norm": 1.9259617328643799,
      "learning_rate": 1.7520000000000001e-06,
      "loss": 0.649,
      "step": 220
    },
    {
      "epoch": 0.0368,
      "grad_norm": 1.8601033687591553,
      "learning_rate": 1.8320000000000002e-06,
      "loss": 0.6858,
      "step": 230
    },
    {
      "epoch": 0.0384,
      "grad_norm": 2.224977970123291,
      "learning_rate": 1.912e-06,
      "loss": 0.6659,
      "step": 240
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.1067371368408203,
      "learning_rate": 1.992e-06,
      "loss": 0.6503,
      "step": 250
    },
    {
      "epoch": 0.0416,
      "grad_norm": 1.68132746219635,
      "learning_rate": 2.0720000000000002e-06,
      "loss": 0.6514,
      "step": 260
    },
    {
      "epoch": 0.0432,
      "grad_norm": 1.469983696937561,
      "learning_rate": 2.1520000000000003e-06,
      "loss": 0.6162,
      "step": 270
    },
    {
      "epoch": 0.0448,
      "grad_norm": 2.2377686500549316,
      "learning_rate": 2.2320000000000004e-06,
      "loss": 0.5814,
      "step": 280
    },
    {
      "epoch": 0.0464,
      "grad_norm": 1.7025582790374756,
      "learning_rate": 2.312e-06,
      "loss": 0.631,
      "step": 290
    },
    {
      "epoch": 0.048,
      "grad_norm": 2.0070271492004395,
      "learning_rate": 2.392e-06,
      "loss": 0.6457,
      "step": 300
    },
    {
      "epoch": 0.0496,
      "grad_norm": 1.4129276275634766,
      "learning_rate": 2.4720000000000002e-06,
      "loss": 0.5784,
      "step": 310
    },
    {
      "epoch": 0.0512,
      "grad_norm": 1.776331901550293,
      "learning_rate": 2.552e-06,
      "loss": 0.6232,
      "step": 320
    },
    {
      "epoch": 0.0528,
      "grad_norm": 1.6155009269714355,
      "learning_rate": 2.632e-06,
      "loss": 0.5892,
      "step": 330
    },
    {
      "epoch": 0.0544,
      "grad_norm": 1.739883303642273,
      "learning_rate": 2.712e-06,
      "loss": 0.6007,
      "step": 340
    },
    {
      "epoch": 0.056,
      "grad_norm": 1.842551827430725,
      "learning_rate": 2.792e-06,
      "loss": 0.6498,
      "step": 350
    },
    {
      "epoch": 0.0576,
      "grad_norm": 1.7769681215286255,
      "learning_rate": 2.872e-06,
      "loss": 0.579,
      "step": 360
    },
    {
      "epoch": 0.0592,
      "grad_norm": 2.0511562824249268,
      "learning_rate": 2.9520000000000003e-06,
      "loss": 0.5703,
      "step": 370
    },
    {
      "epoch": 0.0608,
      "grad_norm": 2.1702704429626465,
      "learning_rate": 3.0320000000000004e-06,
      "loss": 0.65,
      "step": 380
    },
    {
      "epoch": 0.0624,
      "grad_norm": 1.8601093292236328,
      "learning_rate": 3.112e-06,
      "loss": 0.6134,
      "step": 390
    },
    {
      "epoch": 0.064,
      "grad_norm": 1.8366857767105103,
      "learning_rate": 3.192e-06,
      "loss": 0.5787,
      "step": 400
    },
    {
      "epoch": 0.0656,
      "grad_norm": 2.1382994651794434,
      "learning_rate": 3.272e-06,
      "loss": 0.5702,
      "step": 410
    },
    {
      "epoch": 0.0672,
      "grad_norm": 1.7163138389587402,
      "learning_rate": 3.3520000000000003e-06,
      "loss": 0.6287,
      "step": 420
    },
    {
      "epoch": 0.0688,
      "grad_norm": 1.9122556447982788,
      "learning_rate": 3.4320000000000003e-06,
      "loss": 0.6515,
      "step": 430
    },
    {
      "epoch": 0.0704,
      "grad_norm": 1.5989375114440918,
      "learning_rate": 3.5120000000000004e-06,
      "loss": 0.5779,
      "step": 440
    },
    {
      "epoch": 0.072,
      "grad_norm": 1.4073443412780762,
      "learning_rate": 3.5920000000000005e-06,
      "loss": 0.5825,
      "step": 450
    },
    {
      "epoch": 0.0736,
      "grad_norm": 1.3949495553970337,
      "learning_rate": 3.6720000000000006e-06,
      "loss": 0.5795,
      "step": 460
    },
    {
      "epoch": 0.0752,
      "grad_norm": 1.919480800628662,
      "learning_rate": 3.7520000000000002e-06,
      "loss": 0.6778,
      "step": 470
    },
    {
      "epoch": 0.0768,
      "grad_norm": 1.8749704360961914,
      "learning_rate": 3.832e-06,
      "loss": 0.5133,
      "step": 480
    },
    {
      "epoch": 0.0784,
      "grad_norm": 1.7727798223495483,
      "learning_rate": 3.912e-06,
      "loss": 0.6414,
      "step": 490
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.003614664077759,
      "learning_rate": 3.992e-06,
      "loss": 0.6297,
      "step": 500
    },
    {
      "epoch": 0.0816,
      "grad_norm": 1.6000868082046509,
      "learning_rate": 4.072e-06,
      "loss": 0.5397,
      "step": 510
    },
    {
      "epoch": 0.0832,
      "grad_norm": 1.6405130624771118,
      "learning_rate": 4.152e-06,
      "loss": 0.5832,
      "step": 520
    },
    {
      "epoch": 0.0848,
      "grad_norm": 1.4296232461929321,
      "learning_rate": 4.232e-06,
      "loss": 0.5714,
      "step": 530
    },
    {
      "epoch": 0.0864,
      "grad_norm": 1.4758542776107788,
      "learning_rate": 4.312e-06,
      "loss": 0.6051,
      "step": 540
    },
    {
      "epoch": 0.088,
      "grad_norm": 2.095282793045044,
      "learning_rate": 4.3920000000000005e-06,
      "loss": 0.6018,
      "step": 550
    },
    {
      "epoch": 0.0896,
      "grad_norm": 2.1141610145568848,
      "learning_rate": 4.4720000000000006e-06,
      "loss": 0.61,
      "step": 560
    },
    {
      "epoch": 0.0912,
      "grad_norm": 2.1658644676208496,
      "learning_rate": 4.552000000000001e-06,
      "loss": 0.577,
      "step": 570
    },
    {
      "epoch": 0.0928,
      "grad_norm": 2.0406646728515625,
      "learning_rate": 4.632000000000001e-06,
      "loss": 0.6029,
      "step": 580
    },
    {
      "epoch": 0.0944,
      "grad_norm": 1.7191849946975708,
      "learning_rate": 4.712000000000001e-06,
      "loss": 0.665,
      "step": 590
    },
    {
      "epoch": 0.096,
      "grad_norm": 1.4821583032608032,
      "learning_rate": 4.792000000000001e-06,
      "loss": 0.5824,
      "step": 600
    },
    {
      "epoch": 0.0976,
      "grad_norm": 1.7688212394714355,
      "learning_rate": 4.872000000000001e-06,
      "loss": 0.6036,
      "step": 610
    },
    {
      "epoch": 0.0992,
      "grad_norm": 1.5875694751739502,
      "learning_rate": 4.952e-06,
      "loss": 0.5781,
      "step": 620
    },
    {
      "epoch": 0.1008,
      "grad_norm": 1.7072473764419556,
      "learning_rate": 5.032e-06,
      "loss": 0.5856,
      "step": 630
    },
    {
      "epoch": 0.1024,
      "grad_norm": 1.9579156637191772,
      "learning_rate": 5.112e-06,
      "loss": 0.6031,
      "step": 640
    },
    {
      "epoch": 0.104,
      "grad_norm": 1.6670901775360107,
      "learning_rate": 5.1920000000000004e-06,
      "loss": 0.6107,
      "step": 650
    },
    {
      "epoch": 0.1056,
      "grad_norm": 2.026085376739502,
      "learning_rate": 5.2720000000000005e-06,
      "loss": 0.6438,
      "step": 660
    },
    {
      "epoch": 0.1072,
      "grad_norm": 1.4940062761306763,
      "learning_rate": 5.352000000000001e-06,
      "loss": 0.5096,
      "step": 670
    },
    {
      "epoch": 0.1088,
      "grad_norm": 2.0336623191833496,
      "learning_rate": 5.432000000000001e-06,
      "loss": 0.6316,
      "step": 680
    },
    {
      "epoch": 0.1104,
      "grad_norm": 1.7248139381408691,
      "learning_rate": 5.512000000000001e-06,
      "loss": 0.5038,
      "step": 690
    },
    {
      "epoch": 0.112,
      "grad_norm": 1.7868162393569946,
      "learning_rate": 5.592000000000001e-06,
      "loss": 0.575,
      "step": 700
    },
    {
      "epoch": 0.1136,
      "grad_norm": 1.690685510635376,
      "learning_rate": 5.672000000000001e-06,
      "loss": 0.6119,
      "step": 710
    },
    {
      "epoch": 0.1152,
      "grad_norm": 1.551312804222107,
      "learning_rate": 5.752000000000001e-06,
      "loss": 0.5848,
      "step": 720
    },
    {
      "epoch": 0.1168,
      "grad_norm": 1.8913275003433228,
      "learning_rate": 5.832000000000001e-06,
      "loss": 0.5581,
      "step": 730
    },
    {
      "epoch": 0.1184,
      "grad_norm": 1.849355697631836,
      "learning_rate": 5.912e-06,
      "loss": 0.5751,
      "step": 740
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.9218946695327759,
      "learning_rate": 5.992e-06,
      "loss": 0.5531,
      "step": 750
    },
    {
      "epoch": 0.1216,
      "grad_norm": 2.1030099391937256,
      "learning_rate": 6.0720000000000005e-06,
      "loss": 0.6318,
      "step": 760
    },
    {
      "epoch": 0.1232,
      "grad_norm": 1.7341113090515137,
      "learning_rate": 6.1520000000000006e-06,
      "loss": 0.6178,
      "step": 770
    },
    {
      "epoch": 0.1248,
      "grad_norm": 1.5996953248977661,
      "learning_rate": 6.232000000000001e-06,
      "loss": 0.5342,
      "step": 780
    },
    {
      "epoch": 0.1264,
      "grad_norm": 1.8249562978744507,
      "learning_rate": 6.312000000000001e-06,
      "loss": 0.5594,
      "step": 790
    },
    {
      "epoch": 0.128,
      "grad_norm": 1.7191553115844727,
      "learning_rate": 6.392000000000001e-06,
      "loss": 0.6324,
      "step": 800
    },
    {
      "epoch": 0.1296,
      "grad_norm": 1.5428087711334229,
      "learning_rate": 6.472000000000001e-06,
      "loss": 0.5388,
      "step": 810
    },
    {
      "epoch": 0.1312,
      "grad_norm": 1.4592149257659912,
      "learning_rate": 6.552000000000001e-06,
      "loss": 0.5753,
      "step": 820
    },
    {
      "epoch": 0.1328,
      "grad_norm": 2.0682921409606934,
      "learning_rate": 6.632000000000001e-06,
      "loss": 0.5704,
      "step": 830
    },
    {
      "epoch": 0.1344,
      "grad_norm": 1.7124565839767456,
      "learning_rate": 6.712000000000001e-06,
      "loss": 0.5832,
      "step": 840
    },
    {
      "epoch": 0.136,
      "grad_norm": 1.7662416696548462,
      "learning_rate": 6.792000000000001e-06,
      "loss": 0.5188,
      "step": 850
    },
    {
      "epoch": 0.1376,
      "grad_norm": 1.7473351955413818,
      "learning_rate": 6.872000000000001e-06,
      "loss": 0.5567,
      "step": 860
    },
    {
      "epoch": 0.1392,
      "grad_norm": 1.2985845804214478,
      "learning_rate": 6.952000000000001e-06,
      "loss": 0.545,
      "step": 870
    },
    {
      "epoch": 0.1408,
      "grad_norm": 2.312054395675659,
      "learning_rate": 7.0320000000000015e-06,
      "loss": 0.5752,
      "step": 880
    },
    {
      "epoch": 0.1424,
      "grad_norm": 1.7196606397628784,
      "learning_rate": 7.1120000000000015e-06,
      "loss": 0.6271,
      "step": 890
    },
    {
      "epoch": 0.144,
      "grad_norm": 1.8599073886871338,
      "learning_rate": 7.192e-06,
      "loss": 0.6212,
      "step": 900
    },
    {
      "epoch": 0.1456,
      "grad_norm": 1.9128047227859497,
      "learning_rate": 7.272e-06,
      "loss": 0.6307,
      "step": 910
    },
    {
      "epoch": 0.1472,
      "grad_norm": 1.6071052551269531,
      "learning_rate": 7.352e-06,
      "loss": 0.6244,
      "step": 920
    },
    {
      "epoch": 0.1488,
      "grad_norm": 1.6631546020507812,
      "learning_rate": 7.432e-06,
      "loss": 0.547,
      "step": 930
    },
    {
      "epoch": 0.1504,
      "grad_norm": 1.9678213596343994,
      "learning_rate": 7.512e-06,
      "loss": 0.6259,
      "step": 940
    },
    {
      "epoch": 0.152,
      "grad_norm": 1.5648242235183716,
      "learning_rate": 7.592e-06,
      "loss": 0.5227,
      "step": 950
    },
    {
      "epoch": 0.1536,
      "grad_norm": 1.6135765314102173,
      "learning_rate": 7.672e-06,
      "loss": 0.5195,
      "step": 960
    },
    {
      "epoch": 0.1552,
      "grad_norm": 1.6523627042770386,
      "learning_rate": 7.752000000000001e-06,
      "loss": 0.5548,
      "step": 970
    },
    {
      "epoch": 0.1568,
      "grad_norm": 1.6313607692718506,
      "learning_rate": 7.832e-06,
      "loss": 0.5738,
      "step": 980
    },
    {
      "epoch": 0.1584,
      "grad_norm": 1.9010906219482422,
      "learning_rate": 7.912000000000001e-06,
      "loss": 0.6992,
      "step": 990
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.503695011138916,
      "learning_rate": 7.992e-06,
      "loss": 0.5678,
      "step": 1000
    },
    {
      "epoch": 0.1616,
      "grad_norm": 2.0210046768188477,
      "learning_rate": 8.072000000000002e-06,
      "loss": 0.5056,
      "step": 1010
    },
    {
      "epoch": 0.1632,
      "grad_norm": 1.6380538940429688,
      "learning_rate": 8.152000000000001e-06,
      "loss": 0.5345,
      "step": 1020
    },
    {
      "epoch": 0.1648,
      "grad_norm": 1.6869913339614868,
      "learning_rate": 8.232000000000002e-06,
      "loss": 0.5852,
      "step": 1030
    },
    {
      "epoch": 0.1664,
      "grad_norm": 2.066523551940918,
      "learning_rate": 8.312000000000001e-06,
      "loss": 0.5864,
      "step": 1040
    },
    {
      "epoch": 0.168,
      "grad_norm": 1.2786672115325928,
      "learning_rate": 8.392e-06,
      "loss": 0.4961,
      "step": 1050
    },
    {
      "epoch": 0.1696,
      "grad_norm": 1.6929494142532349,
      "learning_rate": 8.472e-06,
      "loss": 0.5596,
      "step": 1060
    },
    {
      "epoch": 0.1712,
      "grad_norm": 1.468377947807312,
      "learning_rate": 8.552e-06,
      "loss": 0.5412,
      "step": 1070
    },
    {
      "epoch": 0.1728,
      "grad_norm": 1.5471081733703613,
      "learning_rate": 8.632e-06,
      "loss": 0.5786,
      "step": 1080
    },
    {
      "epoch": 0.1744,
      "grad_norm": 1.6691291332244873,
      "learning_rate": 8.712e-06,
      "loss": 0.5538,
      "step": 1090
    },
    {
      "epoch": 0.176,
      "grad_norm": 1.9355674982070923,
      "learning_rate": 8.792e-06,
      "loss": 0.6015,
      "step": 1100
    },
    {
      "epoch": 0.1776,
      "grad_norm": 1.7869125604629517,
      "learning_rate": 8.872e-06,
      "loss": 0.612,
      "step": 1110
    },
    {
      "epoch": 0.1792,
      "grad_norm": 1.8887531757354736,
      "learning_rate": 8.952e-06,
      "loss": 0.5543,
      "step": 1120
    },
    {
      "epoch": 0.1808,
      "grad_norm": 1.5424280166625977,
      "learning_rate": 9.032000000000001e-06,
      "loss": 0.586,
      "step": 1130
    },
    {
      "epoch": 0.1824,
      "grad_norm": 1.4026209115982056,
      "learning_rate": 9.112e-06,
      "loss": 0.5623,
      "step": 1140
    },
    {
      "epoch": 0.184,
      "grad_norm": 1.462170958518982,
      "learning_rate": 9.192000000000001e-06,
      "loss": 0.5715,
      "step": 1150
    },
    {
      "epoch": 0.1856,
      "grad_norm": 1.4771801233291626,
      "learning_rate": 9.272e-06,
      "loss": 0.5798,
      "step": 1160
    },
    {
      "epoch": 0.1872,
      "grad_norm": 1.661705732345581,
      "learning_rate": 9.352000000000001e-06,
      "loss": 0.5599,
      "step": 1170
    },
    {
      "epoch": 0.1888,
      "grad_norm": 1.5441359281539917,
      "learning_rate": 9.432e-06,
      "loss": 0.5416,
      "step": 1180
    },
    {
      "epoch": 0.1904,
      "grad_norm": 1.6222203969955444,
      "learning_rate": 9.512000000000001e-06,
      "loss": 0.5691,
      "step": 1190
    },
    {
      "epoch": 0.192,
      "grad_norm": 1.685941457748413,
      "learning_rate": 9.592e-06,
      "loss": 0.5679,
      "step": 1200
    },
    {
      "epoch": 0.1936,
      "grad_norm": 1.648575782775879,
      "learning_rate": 9.672e-06,
      "loss": 0.6112,
      "step": 1210
    },
    {
      "epoch": 0.1952,
      "grad_norm": 1.5603736639022827,
      "learning_rate": 9.752e-06,
      "loss": 0.5354,
      "step": 1220
    },
    {
      "epoch": 0.1968,
      "grad_norm": 1.7889364957809448,
      "learning_rate": 9.832e-06,
      "loss": 0.5844,
      "step": 1230
    },
    {
      "epoch": 0.1984,
      "grad_norm": 1.8155652284622192,
      "learning_rate": 9.912000000000001e-06,
      "loss": 0.59,
      "step": 1240
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.8027434349060059,
      "learning_rate": 9.992e-06,
      "loss": 0.5451,
      "step": 1250
    },
    {
      "epoch": 0.2016,
      "grad_norm": 1.6380250453948975,
      "learning_rate": 9.999984208641271e-06,
      "loss": 0.5831,
      "step": 1260
    },
    {
      "epoch": 0.2032,
      "grad_norm": 1.7677627801895142,
      "learning_rate": 9.99992962135644e-06,
      "loss": 0.5177,
      "step": 1270
    },
    {
      "epoch": 0.2048,
      "grad_norm": 1.821156620979309,
      "learning_rate": 9.99983604361604e-06,
      "loss": 0.6036,
      "step": 1280
    },
    {
      "epoch": 0.2064,
      "grad_norm": 1.567445158958435,
      "learning_rate": 9.999703476149808e-06,
      "loss": 0.5641,
      "step": 1290
    },
    {
      "epoch": 0.208,
      "grad_norm": 1.5950522422790527,
      "learning_rate": 9.999531919991538e-06,
      "loss": 0.5777,
      "step": 1300
    },
    {
      "epoch": 0.2096,
      "grad_norm": 1.893674373626709,
      "learning_rate": 9.999321376479054e-06,
      "loss": 0.5792,
      "step": 1310
    },
    {
      "epoch": 0.2112,
      "grad_norm": 1.759052038192749,
      "learning_rate": 9.999071847254219e-06,
      "loss": 0.5363,
      "step": 1320
    },
    {
      "epoch": 0.2128,
      "grad_norm": 1.8278294801712036,
      "learning_rate": 9.998783334262911e-06,
      "loss": 0.5621,
      "step": 1330
    },
    {
      "epoch": 0.2144,
      "grad_norm": 1.4937176704406738,
      "learning_rate": 9.998455839755013e-06,
      "loss": 0.5813,
      "step": 1340
    },
    {
      "epoch": 0.216,
      "grad_norm": 1.5039318799972534,
      "learning_rate": 9.998089366284392e-06,
      "loss": 0.5663,
      "step": 1350
    },
    {
      "epoch": 0.2176,
      "grad_norm": 1.6976267099380493,
      "learning_rate": 9.99768391670888e-06,
      "loss": 0.5151,
      "step": 1360
    },
    {
      "epoch": 0.2192,
      "grad_norm": 1.809712290763855,
      "learning_rate": 9.997239494190258e-06,
      "loss": 0.6011,
      "step": 1370
    },
    {
      "epoch": 0.2208,
      "grad_norm": 1.6464375257492065,
      "learning_rate": 9.996756102194222e-06,
      "loss": 0.5821,
      "step": 1380
    },
    {
      "epoch": 0.2224,
      "grad_norm": 1.2977737188339233,
      "learning_rate": 9.996233744490356e-06,
      "loss": 0.6374,
      "step": 1390
    },
    {
      "epoch": 0.224,
      "grad_norm": 1.537225365638733,
      "learning_rate": 9.995672425152115e-06,
      "loss": 0.5528,
      "step": 1400
    },
    {
      "epoch": 0.2256,
      "grad_norm": 1.685582160949707,
      "learning_rate": 9.995072148556776e-06,
      "loss": 0.6102,
      "step": 1410
    },
    {
      "epoch": 0.2272,
      "grad_norm": 1.774318814277649,
      "learning_rate": 9.994432919385417e-06,
      "loss": 0.6189,
      "step": 1420
    },
    {
      "epoch": 0.2288,
      "grad_norm": 1.9629392623901367,
      "learning_rate": 9.993754742622879e-06,
      "loss": 0.5906,
      "step": 1430
    },
    {
      "epoch": 0.2304,
      "grad_norm": 1.435943603515625,
      "learning_rate": 9.993037623557716e-06,
      "loss": 0.5964,
      "step": 1440
    },
    {
      "epoch": 0.232,
      "grad_norm": 1.634237289428711,
      "learning_rate": 9.99228156778217e-06,
      "loss": 0.5768,
      "step": 1450
    },
    {
      "epoch": 0.2336,
      "grad_norm": 1.7794053554534912,
      "learning_rate": 9.991486581192115e-06,
      "loss": 0.588,
      "step": 1460
    },
    {
      "epoch": 0.2352,
      "grad_norm": 1.917445421218872,
      "learning_rate": 9.990652669987016e-06,
      "loss": 0.4861,
      "step": 1470
    },
    {
      "epoch": 0.2368,
      "grad_norm": 1.8242907524108887,
      "learning_rate": 9.989779840669878e-06,
      "loss": 0.5735,
      "step": 1480
    },
    {
      "epoch": 0.2384,
      "grad_norm": 1.7687532901763916,
      "learning_rate": 9.988868100047203e-06,
      "loss": 0.57,
      "step": 1490
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.211827039718628,
      "learning_rate": 9.987917455228924e-06,
      "loss": 0.5563,
      "step": 1500
    },
    {
      "epoch": 0.2416,
      "grad_norm": 1.607712984085083,
      "learning_rate": 9.986927913628361e-06,
      "loss": 0.5346,
      "step": 1510
    },
    {
      "epoch": 0.2432,
      "grad_norm": 1.6352814435958862,
      "learning_rate": 9.98589948296216e-06,
      "loss": 0.5437,
      "step": 1520
    },
    {
      "epoch": 0.2448,
      "grad_norm": 1.4810998439788818,
      "learning_rate": 9.98483217125023e-06,
      "loss": 0.5934,
      "step": 1530
    },
    {
      "epoch": 0.2464,
      "grad_norm": 1.7124179601669312,
      "learning_rate": 9.983725986815682e-06,
      "loss": 0.6061,
      "step": 1540
    },
    {
      "epoch": 0.248,
      "grad_norm": 1.7990316152572632,
      "learning_rate": 9.98258093828476e-06,
      "loss": 0.5456,
      "step": 1550
    },
    {
      "epoch": 0.2496,
      "grad_norm": 1.4449198246002197,
      "learning_rate": 9.981397034586789e-06,
      "loss": 0.562,
      "step": 1560
    },
    {
      "epoch": 0.2512,
      "grad_norm": 2.1901655197143555,
      "learning_rate": 9.980174284954084e-06,
      "loss": 0.572,
      "step": 1570
    },
    {
      "epoch": 0.2528,
      "grad_norm": 1.9532365798950195,
      "learning_rate": 9.978912698921892e-06,
      "loss": 0.607,
      "step": 1580
    },
    {
      "epoch": 0.2544,
      "grad_norm": 1.8185838460922241,
      "learning_rate": 9.977612286328317e-06,
      "loss": 0.5713,
      "step": 1590
    },
    {
      "epoch": 0.256,
      "grad_norm": 1.7005282640457153,
      "learning_rate": 9.976273057314236e-06,
      "loss": 0.5656,
      "step": 1600
    },
    {
      "epoch": 0.2576,
      "grad_norm": 1.3290575742721558,
      "learning_rate": 9.974895022323226e-06,
      "loss": 0.5968,
      "step": 1610
    },
    {
      "epoch": 0.2592,
      "grad_norm": 1.6674035787582397,
      "learning_rate": 9.97347819210148e-06,
      "loss": 0.5335,
      "step": 1620
    },
    {
      "epoch": 0.2608,
      "grad_norm": 1.6228221654891968,
      "learning_rate": 9.972022577697726e-06,
      "loss": 0.5403,
      "step": 1630
    },
    {
      "epoch": 0.2624,
      "grad_norm": 1.4433850049972534,
      "learning_rate": 9.970528190463136e-06,
      "loss": 0.5798,
      "step": 1640
    },
    {
      "epoch": 0.264,
      "grad_norm": 1.5653085708618164,
      "learning_rate": 9.968995042051244e-06,
      "loss": 0.5216,
      "step": 1650
    },
    {
      "epoch": 0.2656,
      "grad_norm": 1.4027546644210815,
      "learning_rate": 9.967423144417847e-06,
      "loss": 0.5884,
      "step": 1660
    },
    {
      "epoch": 0.2672,
      "grad_norm": 1.84309720993042,
      "learning_rate": 9.965812509820918e-06,
      "loss": 0.5949,
      "step": 1670
    },
    {
      "epoch": 0.2688,
      "grad_norm": 1.342220664024353,
      "learning_rate": 9.964163150820512e-06,
      "loss": 0.5257,
      "step": 1680
    },
    {
      "epoch": 0.2704,
      "grad_norm": 1.6294057369232178,
      "learning_rate": 9.962475080278662e-06,
      "loss": 0.6513,
      "step": 1690
    },
    {
      "epoch": 0.272,
      "grad_norm": 1.8234083652496338,
      "learning_rate": 9.960748311359278e-06,
      "loss": 0.5903,
      "step": 1700
    },
    {
      "epoch": 0.2736,
      "grad_norm": 1.5385366678237915,
      "learning_rate": 9.958982857528053e-06,
      "loss": 0.5791,
      "step": 1710
    },
    {
      "epoch": 0.2752,
      "grad_norm": 1.834450602531433,
      "learning_rate": 9.957178732552348e-06,
      "loss": 0.5974,
      "step": 1720
    },
    {
      "epoch": 0.2768,
      "grad_norm": 1.5176982879638672,
      "learning_rate": 9.955335950501097e-06,
      "loss": 0.5505,
      "step": 1730
    },
    {
      "epoch": 0.2784,
      "grad_norm": 1.6913715600967407,
      "learning_rate": 9.95345452574468e-06,
      "loss": 0.5666,
      "step": 1740
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.4752143621444702,
      "learning_rate": 9.951534472954826e-06,
      "loss": 0.5373,
      "step": 1750
    },
    {
      "epoch": 0.2816,
      "grad_norm": 1.6566197872161865,
      "learning_rate": 9.949575807104494e-06,
      "loss": 0.6057,
      "step": 1760
    },
    {
      "epoch": 0.2832,
      "grad_norm": 1.230667233467102,
      "learning_rate": 9.947578543467755e-06,
      "loss": 0.5894,
      "step": 1770
    },
    {
      "epoch": 0.2848,
      "grad_norm": 1.6412286758422852,
      "learning_rate": 9.945542697619667e-06,
      "loss": 0.5867,
      "step": 1780
    },
    {
      "epoch": 0.2864,
      "grad_norm": 1.3887630701065063,
      "learning_rate": 9.943468285436171e-06,
      "loss": 0.5151,
      "step": 1790
    },
    {
      "epoch": 0.288,
      "grad_norm": 1.5172789096832275,
      "learning_rate": 9.941355323093944e-06,
      "loss": 0.5108,
      "step": 1800
    },
    {
      "epoch": 0.2896,
      "grad_norm": 1.7635471820831299,
      "learning_rate": 9.939203827070296e-06,
      "loss": 0.5475,
      "step": 1810
    },
    {
      "epoch": 0.2912,
      "grad_norm": 1.4377206563949585,
      "learning_rate": 9.937013814143021e-06,
      "loss": 0.5767,
      "step": 1820
    },
    {
      "epoch": 0.2928,
      "grad_norm": 1.427243709564209,
      "learning_rate": 9.934785301390282e-06,
      "loss": 0.6417,
      "step": 1830
    },
    {
      "epoch": 0.2944,
      "grad_norm": 1.7837238311767578,
      "learning_rate": 9.93251830619047e-06,
      "loss": 0.5708,
      "step": 1840
    },
    {
      "epoch": 0.296,
      "grad_norm": 1.378288984298706,
      "learning_rate": 9.930212846222065e-06,
      "loss": 0.6123,
      "step": 1850
    },
    {
      "epoch": 0.2976,
      "grad_norm": 1.5270707607269287,
      "learning_rate": 9.927868939463511e-06,
      "loss": 0.594,
      "step": 1860
    },
    {
      "epoch": 0.2992,
      "grad_norm": 1.6286526918411255,
      "learning_rate": 9.925486604193064e-06,
      "loss": 0.5353,
      "step": 1870
    },
    {
      "epoch": 0.3008,
      "grad_norm": 1.6687753200531006,
      "learning_rate": 9.92306585898865e-06,
      "loss": 0.588,
      "step": 1880
    },
    {
      "epoch": 0.3024,
      "grad_norm": 1.7869555950164795,
      "learning_rate": 9.920606722727726e-06,
      "loss": 0.5821,
      "step": 1890
    },
    {
      "epoch": 0.304,
      "grad_norm": 1.472643494606018,
      "learning_rate": 9.918109214587134e-06,
      "loss": 0.6336,
      "step": 1900
    },
    {
      "epoch": 0.3056,
      "grad_norm": 1.8117313385009766,
      "learning_rate": 9.915573354042943e-06,
      "loss": 0.5963,
      "step": 1910
    },
    {
      "epoch": 0.3072,
      "grad_norm": 1.7488961219787598,
      "learning_rate": 9.9129991608703e-06,
      "loss": 0.5476,
      "step": 1920
    },
    {
      "epoch": 0.3088,
      "grad_norm": 1.6109665632247925,
      "learning_rate": 9.910386655143285e-06,
      "loss": 0.5461,
      "step": 1930
    },
    {
      "epoch": 0.3104,
      "grad_norm": 1.6546939611434937,
      "learning_rate": 9.90773585723474e-06,
      "loss": 0.571,
      "step": 1940
    },
    {
      "epoch": 0.312,
      "grad_norm": 1.8168220520019531,
      "learning_rate": 9.905046787816118e-06,
      "loss": 0.5555,
      "step": 1950
    },
    {
      "epoch": 0.3136,
      "grad_norm": 1.5799616575241089,
      "learning_rate": 9.902319467857326e-06,
      "loss": 0.5598,
      "step": 1960
    },
    {
      "epoch": 0.3152,
      "grad_norm": 1.7626128196716309,
      "learning_rate": 9.89955391862655e-06,
      "loss": 0.5598,
      "step": 1970
    },
    {
      "epoch": 0.3168,
      "grad_norm": 1.363038420677185,
      "learning_rate": 9.8967501616901e-06,
      "loss": 0.6044,
      "step": 1980
    },
    {
      "epoch": 0.3184,
      "grad_norm": 1.8485779762268066,
      "learning_rate": 9.893908218912237e-06,
      "loss": 0.5792,
      "step": 1990
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.7061164379119873,
      "learning_rate": 9.891028112454998e-06,
      "loss": 0.5656,
      "step": 2000
    },
    {
      "epoch": 0.3216,
      "grad_norm": 1.5981711149215698,
      "learning_rate": 9.888109864778036e-06,
      "loss": 0.5611,
      "step": 2010
    },
    {
      "epoch": 0.3232,
      "grad_norm": 1.6279652118682861,
      "learning_rate": 9.88515349863843e-06,
      "loss": 0.5797,
      "step": 2020
    },
    {
      "epoch": 0.3248,
      "grad_norm": 1.2771573066711426,
      "learning_rate": 9.882159037090517e-06,
      "loss": 0.5236,
      "step": 2030
    },
    {
      "epoch": 0.3264,
      "grad_norm": 1.3223637342453003,
      "learning_rate": 9.879126503485709e-06,
      "loss": 0.5421,
      "step": 2040
    },
    {
      "epoch": 0.328,
      "grad_norm": 1.6403645277023315,
      "learning_rate": 9.876055921472316e-06,
      "loss": 0.5581,
      "step": 2050
    },
    {
      "epoch": 0.3296,
      "grad_norm": 1.3417850732803345,
      "learning_rate": 9.872947314995348e-06,
      "loss": 0.5385,
      "step": 2060
    },
    {
      "epoch": 0.3312,
      "grad_norm": 1.4895073175430298,
      "learning_rate": 9.869800708296347e-06,
      "loss": 0.6235,
      "step": 2070
    },
    {
      "epoch": 0.3328,
      "grad_norm": 1.45980966091156,
      "learning_rate": 9.866616125913182e-06,
      "loss": 0.5474,
      "step": 2080
    },
    {
      "epoch": 0.3344,
      "grad_norm": 1.547318696975708,
      "learning_rate": 9.863393592679867e-06,
      "loss": 0.5732,
      "step": 2090
    },
    {
      "epoch": 0.336,
      "grad_norm": 1.6519551277160645,
      "learning_rate": 9.860133133726364e-06,
      "loss": 0.5773,
      "step": 2100
    },
    {
      "epoch": 0.3376,
      "grad_norm": 1.848542332649231,
      "learning_rate": 9.856834774478385e-06,
      "loss": 0.5267,
      "step": 2110
    },
    {
      "epoch": 0.3392,
      "grad_norm": 1.5022307634353638,
      "learning_rate": 9.853498540657201e-06,
      "loss": 0.5271,
      "step": 2120
    },
    {
      "epoch": 0.3408,
      "grad_norm": 1.7650456428527832,
      "learning_rate": 9.850124458279429e-06,
      "loss": 0.6081,
      "step": 2130
    },
    {
      "epoch": 0.3424,
      "grad_norm": 1.551113486289978,
      "learning_rate": 9.846712553656845e-06,
      "loss": 0.5887,
      "step": 2140
    },
    {
      "epoch": 0.344,
      "grad_norm": 1.6486953496932983,
      "learning_rate": 9.843262853396164e-06,
      "loss": 0.5743,
      "step": 2150
    },
    {
      "epoch": 0.3456,
      "grad_norm": 1.6665668487548828,
      "learning_rate": 9.839775384398846e-06,
      "loss": 0.5836,
      "step": 2160
    },
    {
      "epoch": 0.3472,
      "grad_norm": 1.49594247341156,
      "learning_rate": 9.83625017386087e-06,
      "loss": 0.5771,
      "step": 2170
    },
    {
      "epoch": 0.3488,
      "grad_norm": 1.7653658390045166,
      "learning_rate": 9.83268724927254e-06,
      "loss": 0.5331,
      "step": 2180
    },
    {
      "epoch": 0.3504,
      "grad_norm": 1.6646757125854492,
      "learning_rate": 9.829086638418252e-06,
      "loss": 0.5424,
      "step": 2190
    },
    {
      "epoch": 0.352,
      "grad_norm": 1.4093458652496338,
      "learning_rate": 9.825448369376298e-06,
      "loss": 0.5195,
      "step": 2200
    },
    {
      "epoch": 0.3536,
      "grad_norm": 1.4820239543914795,
      "learning_rate": 9.82177247051863e-06,
      "loss": 0.5263,
      "step": 2210
    },
    {
      "epoch": 0.3552,
      "grad_norm": 1.5001472234725952,
      "learning_rate": 9.818058970510642e-06,
      "loss": 0.5698,
      "step": 2220
    },
    {
      "epoch": 0.3568,
      "grad_norm": 2.0817739963531494,
      "learning_rate": 9.814307898310957e-06,
      "loss": 0.5645,
      "step": 2230
    },
    {
      "epoch": 0.3584,
      "grad_norm": 1.726280927658081,
      "learning_rate": 9.810519283171189e-06,
      "loss": 0.5639,
      "step": 2240
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.384364128112793,
      "learning_rate": 9.806693154635719e-06,
      "loss": 0.5904,
      "step": 2250
    },
    {
      "epoch": 0.3616,
      "grad_norm": 1.5921564102172852,
      "learning_rate": 9.802829542541463e-06,
      "loss": 0.5337,
      "step": 2260
    },
    {
      "epoch": 0.3632,
      "grad_norm": 1.600022792816162,
      "learning_rate": 9.798928477017651e-06,
      "loss": 0.561,
      "step": 2270
    },
    {
      "epoch": 0.3648,
      "grad_norm": 1.393886685371399,
      "learning_rate": 9.794989988485571e-06,
      "loss": 0.5616,
      "step": 2280
    },
    {
      "epoch": 0.3664,
      "grad_norm": 1.7256858348846436,
      "learning_rate": 9.791014107658348e-06,
      "loss": 0.5973,
      "step": 2290
    },
    {
      "epoch": 0.368,
      "grad_norm": 1.4691014289855957,
      "learning_rate": 9.787000865540698e-06,
      "loss": 0.5381,
      "step": 2300
    },
    {
      "epoch": 0.3696,
      "grad_norm": 1.3663450479507446,
      "learning_rate": 9.782950293428695e-06,
      "loss": 0.5824,
      "step": 2310
    },
    {
      "epoch": 0.3712,
      "grad_norm": 1.7860890626907349,
      "learning_rate": 9.778862422909507e-06,
      "loss": 0.5901,
      "step": 2320
    },
    {
      "epoch": 0.3728,
      "grad_norm": 1.240251898765564,
      "learning_rate": 9.774737285861176e-06,
      "loss": 0.5397,
      "step": 2330
    },
    {
      "epoch": 0.3744,
      "grad_norm": 1.6613671779632568,
      "learning_rate": 9.770574914452343e-06,
      "loss": 0.6246,
      "step": 2340
    },
    {
      "epoch": 0.376,
      "grad_norm": 1.859451413154602,
      "learning_rate": 9.76637534114202e-06,
      "loss": 0.5471,
      "step": 2350
    },
    {
      "epoch": 0.3776,
      "grad_norm": 1.439486026763916,
      "learning_rate": 9.762138598679324e-06,
      "loss": 0.5848,
      "step": 2360
    },
    {
      "epoch": 0.3792,
      "grad_norm": 1.7923952341079712,
      "learning_rate": 9.757864720103222e-06,
      "loss": 0.6249,
      "step": 2370
    },
    {
      "epoch": 0.3808,
      "grad_norm": 1.2177571058273315,
      "learning_rate": 9.753553738742278e-06,
      "loss": 0.5704,
      "step": 2380
    },
    {
      "epoch": 0.3824,
      "grad_norm": 1.4877334833145142,
      "learning_rate": 9.74920568821439e-06,
      "loss": 0.5569,
      "step": 2390
    },
    {
      "epoch": 0.384,
      "grad_norm": 1.552570104598999,
      "learning_rate": 9.74482060242653e-06,
      "loss": 0.5629,
      "step": 2400
    },
    {
      "epoch": 0.3856,
      "grad_norm": 1.6734225749969482,
      "learning_rate": 9.74039851557448e-06,
      "loss": 0.5617,
      "step": 2410
    },
    {
      "epoch": 0.3872,
      "grad_norm": 1.688307762145996,
      "learning_rate": 9.735939462142558e-06,
      "loss": 0.5872,
      "step": 2420
    },
    {
      "epoch": 0.3888,
      "grad_norm": 1.4431356191635132,
      "learning_rate": 9.73144347690336e-06,
      "loss": 0.541,
      "step": 2430
    },
    {
      "epoch": 0.3904,
      "grad_norm": 1.5212606191635132,
      "learning_rate": 9.726910594917482e-06,
      "loss": 0.5338,
      "step": 2440
    },
    {
      "epoch": 0.392,
      "grad_norm": 1.899236798286438,
      "learning_rate": 9.72234085153325e-06,
      "loss": 0.5844,
      "step": 2450
    },
    {
      "epoch": 0.3936,
      "grad_norm": 1.5290489196777344,
      "learning_rate": 9.717734282386439e-06,
      "loss": 0.5983,
      "step": 2460
    },
    {
      "epoch": 0.3952,
      "grad_norm": 1.4352022409439087,
      "learning_rate": 9.713090923399999e-06,
      "loss": 0.5471,
      "step": 2470
    },
    {
      "epoch": 0.3968,
      "grad_norm": 1.5614298582077026,
      "learning_rate": 9.70841081078378e-06,
      "loss": 0.5935,
      "step": 2480
    },
    {
      "epoch": 0.3984,
      "grad_norm": 1.9216426610946655,
      "learning_rate": 9.703693981034236e-06,
      "loss": 0.5835,
      "step": 2490
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.855220079421997,
      "learning_rate": 9.698940470934158e-06,
      "loss": 0.6442,
      "step": 2500
    },
    {
      "epoch": 0.4016,
      "grad_norm": 1.7325100898742676,
      "learning_rate": 9.694150317552367e-06,
      "loss": 0.5652,
      "step": 2510
    },
    {
      "epoch": 0.4032,
      "grad_norm": 1.7569974660873413,
      "learning_rate": 9.689323558243446e-06,
      "loss": 0.5846,
      "step": 2520
    },
    {
      "epoch": 0.4048,
      "grad_norm": 1.4056826829910278,
      "learning_rate": 9.68446023064743e-06,
      "loss": 0.5341,
      "step": 2530
    },
    {
      "epoch": 0.4064,
      "grad_norm": 1.4743527173995972,
      "learning_rate": 9.679560372689527e-06,
      "loss": 0.5319,
      "step": 2540
    },
    {
      "epoch": 0.408,
      "grad_norm": 1.6835367679595947,
      "learning_rate": 9.674624022579814e-06,
      "loss": 0.6089,
      "step": 2550
    },
    {
      "epoch": 0.4096,
      "grad_norm": 1.5585544109344482,
      "learning_rate": 9.669651218812938e-06,
      "loss": 0.5557,
      "step": 2560
    },
    {
      "epoch": 0.4112,
      "grad_norm": 1.4717180728912354,
      "learning_rate": 9.664642000167825e-06,
      "loss": 0.5599,
      "step": 2570
    },
    {
      "epoch": 0.4128,
      "grad_norm": 1.4934587478637695,
      "learning_rate": 9.659596405707366e-06,
      "loss": 0.5775,
      "step": 2580
    },
    {
      "epoch": 0.4144,
      "grad_norm": 1.437104344367981,
      "learning_rate": 9.65451447477812e-06,
      "loss": 0.5256,
      "step": 2590
    },
    {
      "epoch": 0.416,
      "grad_norm": 1.7445427179336548,
      "learning_rate": 9.649396247010008e-06,
      "loss": 0.6085,
      "step": 2600
    },
    {
      "epoch": 0.4176,
      "grad_norm": 1.6245182752609253,
      "learning_rate": 9.644241762315995e-06,
      "loss": 0.5698,
      "step": 2610
    },
    {
      "epoch": 0.4192,
      "grad_norm": 1.8400903940200806,
      "learning_rate": 9.639051060891789e-06,
      "loss": 0.6051,
      "step": 2620
    },
    {
      "epoch": 0.4208,
      "grad_norm": 1.3932462930679321,
      "learning_rate": 9.633824183215525e-06,
      "loss": 0.5919,
      "step": 2630
    },
    {
      "epoch": 0.4224,
      "grad_norm": 1.249548077583313,
      "learning_rate": 9.62856117004744e-06,
      "loss": 0.5209,
      "step": 2640
    },
    {
      "epoch": 0.424,
      "grad_norm": 1.3328934907913208,
      "learning_rate": 9.623262062429573e-06,
      "loss": 0.5492,
      "step": 2650
    },
    {
      "epoch": 0.4256,
      "grad_norm": 1.536808967590332,
      "learning_rate": 9.617926901685427e-06,
      "loss": 0.5482,
      "step": 2660
    },
    {
      "epoch": 0.4272,
      "grad_norm": 1.3934202194213867,
      "learning_rate": 9.612555729419656e-06,
      "loss": 0.487,
      "step": 2670
    },
    {
      "epoch": 0.4288,
      "grad_norm": 1.461133599281311,
      "learning_rate": 9.607148587517746e-06,
      "loss": 0.5582,
      "step": 2680
    },
    {
      "epoch": 0.4304,
      "grad_norm": 1.5878986120224,
      "learning_rate": 9.601705518145668e-06,
      "loss": 0.5984,
      "step": 2690
    },
    {
      "epoch": 0.432,
      "grad_norm": 1.1981624364852905,
      "learning_rate": 9.596226563749575e-06,
      "loss": 0.5168,
      "step": 2700
    },
    {
      "epoch": 0.4336,
      "grad_norm": 1.92930269241333,
      "learning_rate": 9.590711767055454e-06,
      "loss": 0.5535,
      "step": 2710
    },
    {
      "epoch": 0.4352,
      "grad_norm": 1.257930874824524,
      "learning_rate": 9.585161171068796e-06,
      "loss": 0.5662,
      "step": 2720
    },
    {
      "epoch": 0.4368,
      "grad_norm": 1.8861515522003174,
      "learning_rate": 9.579574819074263e-06,
      "loss": 0.5904,
      "step": 2730
    },
    {
      "epoch": 0.4384,
      "grad_norm": 1.3617736101150513,
      "learning_rate": 9.573952754635351e-06,
      "loss": 0.5486,
      "step": 2740
    },
    {
      "epoch": 0.44,
      "grad_norm": 1.3279486894607544,
      "learning_rate": 9.568295021594049e-06,
      "loss": 0.5692,
      "step": 2750
    },
    {
      "epoch": 0.4416,
      "grad_norm": 1.6831086874008179,
      "learning_rate": 9.562601664070495e-06,
      "loss": 0.582,
      "step": 2760
    },
    {
      "epoch": 0.4432,
      "grad_norm": 1.6577680110931396,
      "learning_rate": 9.556872726462634e-06,
      "loss": 0.5484,
      "step": 2770
    },
    {
      "epoch": 0.4448,
      "grad_norm": 1.7526309490203857,
      "learning_rate": 9.55110825344587e-06,
      "loss": 0.5535,
      "step": 2780
    },
    {
      "epoch": 0.4464,
      "grad_norm": 1.4835820198059082,
      "learning_rate": 9.545308289972727e-06,
      "loss": 0.5645,
      "step": 2790
    },
    {
      "epoch": 0.448,
      "grad_norm": 1.415121078491211,
      "learning_rate": 9.539472881272483e-06,
      "loss": 0.54,
      "step": 2800
    },
    {
      "epoch": 0.4496,
      "grad_norm": 1.2535109519958496,
      "learning_rate": 9.533602072850826e-06,
      "loss": 0.5618,
      "step": 2810
    },
    {
      "epoch": 0.4512,
      "grad_norm": 1.2987178564071655,
      "learning_rate": 9.527695910489498e-06,
      "loss": 0.5615,
      "step": 2820
    },
    {
      "epoch": 0.4528,
      "grad_norm": 1.6587828397750854,
      "learning_rate": 9.521754440245944e-06,
      "loss": 0.5689,
      "step": 2830
    },
    {
      "epoch": 0.4544,
      "grad_norm": 1.4573450088500977,
      "learning_rate": 9.515777708452938e-06,
      "loss": 0.4813,
      "step": 2840
    },
    {
      "epoch": 0.456,
      "grad_norm": 1.6511763334274292,
      "learning_rate": 9.50976576171824e-06,
      "loss": 0.5113,
      "step": 2850
    },
    {
      "epoch": 0.4576,
      "grad_norm": 1.8001500368118286,
      "learning_rate": 9.503718646924211e-06,
      "loss": 0.5191,
      "step": 2860
    },
    {
      "epoch": 0.4592,
      "grad_norm": 1.5531097650527954,
      "learning_rate": 9.497636411227476e-06,
      "loss": 0.5677,
      "step": 2870
    },
    {
      "epoch": 0.4608,
      "grad_norm": 1.5873191356658936,
      "learning_rate": 9.491519102058523e-06,
      "loss": 0.5438,
      "step": 2880
    },
    {
      "epoch": 0.4624,
      "grad_norm": 1.5620393753051758,
      "learning_rate": 9.485366767121363e-06,
      "loss": 0.657,
      "step": 2890
    },
    {
      "epoch": 0.464,
      "grad_norm": 1.4177587032318115,
      "learning_rate": 9.479179454393135e-06,
      "loss": 0.5192,
      "step": 2900
    },
    {
      "epoch": 0.4656,
      "grad_norm": 1.4712775945663452,
      "learning_rate": 9.472957212123751e-06,
      "loss": 0.5684,
      "step": 2910
    },
    {
      "epoch": 0.4672,
      "grad_norm": 1.5090250968933105,
      "learning_rate": 9.466700088835505e-06,
      "loss": 0.5498,
      "step": 2920
    },
    {
      "epoch": 0.4688,
      "grad_norm": 1.6817193031311035,
      "learning_rate": 9.460408133322698e-06,
      "loss": 0.5468,
      "step": 2930
    },
    {
      "epoch": 0.4704,
      "grad_norm": 1.770856499671936,
      "learning_rate": 9.454081394651267e-06,
      "loss": 0.5929,
      "step": 2940
    },
    {
      "epoch": 0.472,
      "grad_norm": 1.357238531112671,
      "learning_rate": 9.447719922158391e-06,
      "loss": 0.5393,
      "step": 2950
    },
    {
      "epoch": 0.4736,
      "grad_norm": 1.3041408061981201,
      "learning_rate": 9.441323765452107e-06,
      "loss": 0.5065,
      "step": 2960
    },
    {
      "epoch": 0.4752,
      "grad_norm": 1.5253078937530518,
      "learning_rate": 9.434892974410932e-06,
      "loss": 0.5939,
      "step": 2970
    },
    {
      "epoch": 0.4768,
      "grad_norm": 1.6354926824569702,
      "learning_rate": 9.428427599183467e-06,
      "loss": 0.5497,
      "step": 2980
    },
    {
      "epoch": 0.4784,
      "grad_norm": 1.2458250522613525,
      "learning_rate": 9.421927690188006e-06,
      "loss": 0.5633,
      "step": 2990
    },
    {
      "epoch": 0.48,
      "grad_norm": 1.6764708757400513,
      "learning_rate": 9.415393298112145e-06,
      "loss": 0.5424,
      "step": 3000
    },
    {
      "epoch": 0.4816,
      "grad_norm": 1.4938427209854126,
      "learning_rate": 9.408824473912387e-06,
      "loss": 0.5356,
      "step": 3010
    },
    {
      "epoch": 0.4832,
      "grad_norm": 1.4013876914978027,
      "learning_rate": 9.402221268813741e-06,
      "loss": 0.53,
      "step": 3020
    },
    {
      "epoch": 0.4848,
      "grad_norm": 1.6770672798156738,
      "learning_rate": 9.395583734309327e-06,
      "loss": 0.5724,
      "step": 3030
    },
    {
      "epoch": 0.4864,
      "grad_norm": 1.329093098640442,
      "learning_rate": 9.388911922159973e-06,
      "loss": 0.4879,
      "step": 3040
    },
    {
      "epoch": 0.488,
      "grad_norm": 1.3167623281478882,
      "learning_rate": 9.38220588439381e-06,
      "loss": 0.6006,
      "step": 3050
    },
    {
      "epoch": 0.4896,
      "grad_norm": 1.5179733037948608,
      "learning_rate": 9.37546567330587e-06,
      "loss": 0.5293,
      "step": 3060
    },
    {
      "epoch": 0.4912,
      "grad_norm": 1.569939136505127,
      "learning_rate": 9.36869134145767e-06,
      "loss": 0.5343,
      "step": 3070
    },
    {
      "epoch": 0.4928,
      "grad_norm": 1.4918403625488281,
      "learning_rate": 9.36188294167681e-06,
      "loss": 0.4913,
      "step": 3080
    },
    {
      "epoch": 0.4944,
      "grad_norm": 1.3183833360671997,
      "learning_rate": 9.35504052705656e-06,
      "loss": 0.6321,
      "step": 3090
    },
    {
      "epoch": 0.496,
      "grad_norm": 1.3474185466766357,
      "learning_rate": 9.348164150955448e-06,
      "loss": 0.5296,
      "step": 3100
    },
    {
      "epoch": 0.4976,
      "grad_norm": 1.415200114250183,
      "learning_rate": 9.34125386699683e-06,
      "loss": 0.526,
      "step": 3110
    },
    {
      "epoch": 0.4992,
      "grad_norm": 1.3231251239776611,
      "learning_rate": 9.33430972906849e-06,
      "loss": 0.5175,
      "step": 3120
    },
    {
      "epoch": 0.5008,
      "grad_norm": 1.3747608661651611,
      "learning_rate": 9.327331791322214e-06,
      "loss": 0.5181,
      "step": 3130
    },
    {
      "epoch": 0.5024,
      "grad_norm": 1.23919677734375,
      "learning_rate": 9.320320108173359e-06,
      "loss": 0.5243,
      "step": 3140
    },
    {
      "epoch": 0.504,
      "grad_norm": 1.5528453588485718,
      "learning_rate": 9.31327473430044e-06,
      "loss": 0.6147,
      "step": 3150
    },
    {
      "epoch": 0.5056,
      "grad_norm": 1.2988446950912476,
      "learning_rate": 9.306195724644695e-06,
      "loss": 0.5581,
      "step": 3160
    },
    {
      "epoch": 0.5072,
      "grad_norm": 1.5376354455947876,
      "learning_rate": 9.299083134409667e-06,
      "loss": 0.5091,
      "step": 3170
    },
    {
      "epoch": 0.5088,
      "grad_norm": 1.4026676416397095,
      "learning_rate": 9.291937019060762e-06,
      "loss": 0.5799,
      "step": 3180
    },
    {
      "epoch": 0.5104,
      "grad_norm": 1.3348209857940674,
      "learning_rate": 9.284757434324823e-06,
      "loss": 0.5778,
      "step": 3190
    },
    {
      "epoch": 0.512,
      "grad_norm": 1.3523982763290405,
      "learning_rate": 9.277544436189693e-06,
      "loss": 0.5468,
      "step": 3200
    },
    {
      "epoch": 0.5136,
      "grad_norm": 1.6282680034637451,
      "learning_rate": 9.270298080903782e-06,
      "loss": 0.6032,
      "step": 3210
    },
    {
      "epoch": 0.5152,
      "grad_norm": 1.4806174039840698,
      "learning_rate": 9.263018424975624e-06,
      "loss": 0.5448,
      "step": 3220
    },
    {
      "epoch": 0.5168,
      "grad_norm": 1.825416922569275,
      "learning_rate": 9.255705525173437e-06,
      "loss": 0.493,
      "step": 3230
    },
    {
      "epoch": 0.5184,
      "grad_norm": 1.5116595029830933,
      "learning_rate": 9.248359438524683e-06,
      "loss": 0.5225,
      "step": 3240
    },
    {
      "epoch": 0.52,
      "grad_norm": 1.5296186208724976,
      "learning_rate": 9.24098022231562e-06,
      "loss": 0.5504,
      "step": 3250
    },
    {
      "epoch": 0.5216,
      "grad_norm": 1.2617089748382568,
      "learning_rate": 9.233567934090864e-06,
      "loss": 0.5151,
      "step": 3260
    },
    {
      "epoch": 0.5232,
      "grad_norm": 1.7066289186477661,
      "learning_rate": 9.226122631652921e-06,
      "loss": 0.5836,
      "step": 3270
    },
    {
      "epoch": 0.5248,
      "grad_norm": 1.4581398963928223,
      "learning_rate": 9.218644373061759e-06,
      "loss": 0.5923,
      "step": 3280
    },
    {
      "epoch": 0.5264,
      "grad_norm": 1.5108258724212646,
      "learning_rate": 9.211133216634339e-06,
      "loss": 0.554,
      "step": 3290
    },
    {
      "epoch": 0.528,
      "grad_norm": 1.6794824600219727,
      "learning_rate": 9.203589220944166e-06,
      "loss": 0.5499,
      "step": 3300
    },
    {
      "epoch": 0.5296,
      "grad_norm": 1.260703206062317,
      "learning_rate": 9.196012444820839e-06,
      "loss": 0.5716,
      "step": 3310
    },
    {
      "epoch": 0.5312,
      "grad_norm": 1.4993669986724854,
      "learning_rate": 9.188402947349575e-06,
      "loss": 0.5963,
      "step": 3320
    },
    {
      "epoch": 0.5328,
      "grad_norm": 1.601504921913147,
      "learning_rate": 9.180760787870766e-06,
      "loss": 0.5565,
      "step": 3330
    },
    {
      "epoch": 0.5344,
      "grad_norm": 1.6528475284576416,
      "learning_rate": 9.173086025979507e-06,
      "loss": 0.5614,
      "step": 3340
    },
    {
      "epoch": 0.536,
      "grad_norm": 1.4456391334533691,
      "learning_rate": 9.165378721525133e-06,
      "loss": 0.523,
      "step": 3350
    },
    {
      "epoch": 0.5376,
      "grad_norm": 1.49320650100708,
      "learning_rate": 9.15763893461075e-06,
      "loss": 0.5239,
      "step": 3360
    },
    {
      "epoch": 0.5392,
      "grad_norm": 1.6992675065994263,
      "learning_rate": 9.149866725592777e-06,
      "loss": 0.6091,
      "step": 3370
    },
    {
      "epoch": 0.5408,
      "grad_norm": 1.5509899854660034,
      "learning_rate": 9.142062155080455e-06,
      "loss": 0.5403,
      "step": 3380
    },
    {
      "epoch": 0.5424,
      "grad_norm": 1.3177284002304077,
      "learning_rate": 9.134225283935395e-06,
      "loss": 0.5235,
      "step": 3390
    },
    {
      "epoch": 0.544,
      "grad_norm": 1.6410481929779053,
      "learning_rate": 9.126356173271092e-06,
      "loss": 0.5709,
      "step": 3400
    },
    {
      "epoch": 0.5456,
      "grad_norm": 1.2441757917404175,
      "learning_rate": 9.118454884452452e-06,
      "loss": 0.5719,
      "step": 3410
    },
    {
      "epoch": 0.5472,
      "grad_norm": 1.3330907821655273,
      "learning_rate": 9.110521479095314e-06,
      "loss": 0.5522,
      "step": 3420
    },
    {
      "epoch": 0.5488,
      "grad_norm": 1.3912606239318848,
      "learning_rate": 9.102556019065962e-06,
      "loss": 0.5927,
      "step": 3430
    },
    {
      "epoch": 0.5504,
      "grad_norm": 1.4834200143814087,
      "learning_rate": 9.094558566480659e-06,
      "loss": 0.5303,
      "step": 3440
    },
    {
      "epoch": 0.552,
      "grad_norm": 1.6571238040924072,
      "learning_rate": 9.086529183705144e-06,
      "loss": 0.5578,
      "step": 3450
    },
    {
      "epoch": 0.5536,
      "grad_norm": 1.272433876991272,
      "learning_rate": 9.078467933354156e-06,
      "loss": 0.4896,
      "step": 3460
    },
    {
      "epoch": 0.5552,
      "grad_norm": 1.6238347291946411,
      "learning_rate": 9.070374878290946e-06,
      "loss": 0.5722,
      "step": 3470
    },
    {
      "epoch": 0.5568,
      "grad_norm": 1.5055066347122192,
      "learning_rate": 9.062250081626784e-06,
      "loss": 0.572,
      "step": 3480
    },
    {
      "epoch": 0.5584,
      "grad_norm": 1.6481482982635498,
      "learning_rate": 9.054093606720464e-06,
      "loss": 0.5319,
      "step": 3490
    },
    {
      "epoch": 0.56,
      "grad_norm": 1.619781494140625,
      "learning_rate": 9.045905517177817e-06,
      "loss": 0.5427,
      "step": 3500
    },
    {
      "epoch": 0.5616,
      "grad_norm": 1.3197731971740723,
      "learning_rate": 9.037685876851211e-06,
      "loss": 0.5208,
      "step": 3510
    },
    {
      "epoch": 0.5632,
      "grad_norm": 1.4628829956054688,
      "learning_rate": 9.02943474983905e-06,
      "loss": 0.541,
      "step": 3520
    },
    {
      "epoch": 0.5648,
      "grad_norm": 1.376360297203064,
      "learning_rate": 9.021152200485283e-06,
      "loss": 0.5768,
      "step": 3530
    },
    {
      "epoch": 0.5664,
      "grad_norm": 1.3545571565628052,
      "learning_rate": 9.01283829337889e-06,
      "loss": 0.4855,
      "step": 3540
    },
    {
      "epoch": 0.568,
      "grad_norm": 1.2771652936935425,
      "learning_rate": 9.004493093353394e-06,
      "loss": 0.5163,
      "step": 3550
    },
    {
      "epoch": 0.5696,
      "grad_norm": 1.6206740140914917,
      "learning_rate": 8.996116665486337e-06,
      "loss": 0.596,
      "step": 3560
    },
    {
      "epoch": 0.5712,
      "grad_norm": 2.405425548553467,
      "learning_rate": 8.987709075098786e-06,
      "loss": 0.5715,
      "step": 3570
    },
    {
      "epoch": 0.5728,
      "grad_norm": 1.6862409114837646,
      "learning_rate": 8.97927038775482e-06,
      "loss": 0.5738,
      "step": 3580
    },
    {
      "epoch": 0.5744,
      "grad_norm": 1.4156520366668701,
      "learning_rate": 8.970800669261022e-06,
      "loss": 0.5587,
      "step": 3590
    },
    {
      "epoch": 0.576,
      "grad_norm": 1.4859108924865723,
      "learning_rate": 8.962299985665955e-06,
      "loss": 0.5624,
      "step": 3600
    },
    {
      "epoch": 0.5776,
      "grad_norm": 1.5429044961929321,
      "learning_rate": 8.953768403259655e-06,
      "loss": 0.5461,
      "step": 3610
    },
    {
      "epoch": 0.5792,
      "grad_norm": 1.7539013624191284,
      "learning_rate": 8.945205988573117e-06,
      "loss": 0.5685,
      "step": 3620
    },
    {
      "epoch": 0.5808,
      "grad_norm": 1.4808688163757324,
      "learning_rate": 8.936612808377773e-06,
      "loss": 0.6238,
      "step": 3630
    },
    {
      "epoch": 0.5824,
      "grad_norm": 1.4759222269058228,
      "learning_rate": 8.92798892968497e-06,
      "loss": 0.5214,
      "step": 3640
    },
    {
      "epoch": 0.584,
      "grad_norm": 1.4132190942764282,
      "learning_rate": 8.91933441974544e-06,
      "loss": 0.5675,
      "step": 3650
    },
    {
      "epoch": 0.5856,
      "grad_norm": 1.2294608354568481,
      "learning_rate": 8.910649346048792e-06,
      "loss": 0.54,
      "step": 3660
    },
    {
      "epoch": 0.5872,
      "grad_norm": 1.6163822412490845,
      "learning_rate": 8.90193377632298e-06,
      "loss": 0.5421,
      "step": 3670
    },
    {
      "epoch": 0.5888,
      "grad_norm": 1.7496393918991089,
      "learning_rate": 8.893187778533763e-06,
      "loss": 0.5123,
      "step": 3680
    },
    {
      "epoch": 0.5904,
      "grad_norm": 1.3820980787277222,
      "learning_rate": 8.88441142088419e-06,
      "loss": 0.5852,
      "step": 3690
    },
| { | |
| "epoch": 0.592, | |
| "grad_norm": 1.424734115600586, | |
| "learning_rate": 8.87560477181406e-06, | |
| "loss": 0.5466, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 0.5936, | |
| "grad_norm": 1.540697455406189, | |
| "learning_rate": 8.86676789999939e-06, | |
| "loss": 0.5039, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 0.5952, | |
| "grad_norm": 1.3002108335494995, | |
| "learning_rate": 8.857900874351888e-06, | |
| "loss": 0.5674, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 0.5968, | |
| "grad_norm": 1.3067485094070435, | |
| "learning_rate": 8.849003764018395e-06, | |
| "loss": 0.5698, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 0.5984, | |
| "grad_norm": 1.394988775253296, | |
| "learning_rate": 8.840076638380368e-06, | |
| "loss": 0.5841, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.4560528993606567, | |
| "learning_rate": 8.831119567053323e-06, | |
| "loss": 0.512, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 0.6016, | |
| "grad_norm": 1.5295023918151855, | |
| "learning_rate": 8.822132619886303e-06, | |
| "loss": 0.5771, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 0.6032, | |
| "grad_norm": 1.659285068511963, | |
| "learning_rate": 8.81311586696133e-06, | |
| "loss": 0.5389, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 0.6048, | |
| "grad_norm": 1.6268696784973145, | |
| "learning_rate": 8.80406937859285e-06, | |
| "loss": 0.5303, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 0.6064, | |
| "grad_norm": 1.5490257740020752, | |
| "learning_rate": 8.794993225327199e-06, | |
| "loss": 0.5766, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 0.608, | |
| "grad_norm": 1.3740674257278442, | |
| "learning_rate": 8.785887477942041e-06, | |
| "loss": 0.5392, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 0.6096, | |
| "grad_norm": 1.6556349992752075, | |
| "learning_rate": 8.776752207445829e-06, | |
| "loss": 0.5961, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 0.6112, | |
| "grad_norm": 1.6024754047393799, | |
| "learning_rate": 8.76758748507723e-06, | |
| "loss": 0.5918, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 0.6128, | |
| "grad_norm": 1.5299988985061646, | |
| "learning_rate": 8.758393382304597e-06, | |
| "loss": 0.5308, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 0.6144, | |
| "grad_norm": 1.4135096073150635, | |
| "learning_rate": 8.749169970825384e-06, | |
| "loss": 0.5294, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 0.616, | |
| "grad_norm": 1.794809103012085, | |
| "learning_rate": 8.73991732256561e-06, | |
| "loss": 0.5867, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 0.6176, | |
| "grad_norm": 1.463224172592163, | |
| "learning_rate": 8.730635509679286e-06, | |
| "loss": 0.6034, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 0.6192, | |
| "grad_norm": 1.2754790782928467, | |
| "learning_rate": 8.721324604547851e-06, | |
| "loss": 0.5528, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 0.6208, | |
| "grad_norm": 1.2894023656845093, | |
| "learning_rate": 8.711984679779612e-06, | |
| "loss": 0.5507, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 0.6224, | |
| "grad_norm": 1.3894413709640503, | |
| "learning_rate": 8.702615808209185e-06, | |
| "loss": 0.5587, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 0.624, | |
| "grad_norm": 1.233060598373413, | |
| "learning_rate": 8.693218062896905e-06, | |
| "loss": 0.5395, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 0.6256, | |
| "grad_norm": 1.567837119102478, | |
| "learning_rate": 8.683791517128282e-06, | |
| "loss": 0.5852, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 0.6272, | |
| "grad_norm": 1.4712578058242798, | |
| "learning_rate": 8.674336244413413e-06, | |
| "loss": 0.539, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 0.6288, | |
| "grad_norm": 1.484992265701294, | |
| "learning_rate": 8.664852318486412e-06, | |
| "loss": 0.557, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 0.6304, | |
| "grad_norm": 1.4214953184127808, | |
| "learning_rate": 8.655339813304842e-06, | |
| "loss": 0.5341, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 0.632, | |
| "grad_norm": 1.749202847480774, | |
| "learning_rate": 8.645798803049126e-06, | |
| "loss": 0.5419, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 0.6336, | |
| "grad_norm": 1.6552705764770508, | |
| "learning_rate": 8.636229362121979e-06, | |
| "loss": 0.5563, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 0.6352, | |
| "grad_norm": 1.6280750036239624, | |
| "learning_rate": 8.626631565147827e-06, | |
| "loss": 0.5317, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 0.6368, | |
| "grad_norm": 1.5368692874908447, | |
| "learning_rate": 8.617005486972214e-06, | |
| "loss": 0.5421, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 0.6384, | |
| "grad_norm": 1.5316983461380005, | |
| "learning_rate": 8.607351202661236e-06, | |
| "loss": 0.5476, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.8238877058029175, | |
| "learning_rate": 8.597668787500937e-06, | |
| "loss": 0.5752, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 0.6416, | |
| "grad_norm": 1.5838607549667358, | |
| "learning_rate": 8.587958316996739e-06, | |
| "loss": 0.5187, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 0.6432, | |
| "grad_norm": 1.4137400388717651, | |
| "learning_rate": 8.57821986687284e-06, | |
| "loss": 0.5118, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 0.6448, | |
| "grad_norm": 1.6110658645629883, | |
| "learning_rate": 8.568453513071628e-06, | |
| "loss": 0.544, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 0.6464, | |
| "grad_norm": 1.5780843496322632, | |
| "learning_rate": 8.558659331753096e-06, | |
| "loss": 0.5283, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 0.648, | |
| "grad_norm": 1.8519810438156128, | |
| "learning_rate": 8.548837399294235e-06, | |
| "loss": 0.6182, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 0.6496, | |
| "grad_norm": 1.629824161529541, | |
| "learning_rate": 8.538987792288447e-06, | |
| "loss": 0.4925, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 0.6512, | |
| "grad_norm": 1.5800502300262451, | |
| "learning_rate": 8.52911058754495e-06, | |
| "loss": 0.5993, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 0.6528, | |
| "grad_norm": 1.3768564462661743, | |
| "learning_rate": 8.519205862088165e-06, | |
| "loss": 0.5247, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 0.6544, | |
| "grad_norm": 1.6619504690170288, | |
| "learning_rate": 8.509273693157133e-06, | |
| "loss": 0.5581, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 0.656, | |
| "grad_norm": 1.3011277914047241, | |
| "learning_rate": 8.499314158204904e-06, | |
| "loss": 0.5446, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 0.6576, | |
| "grad_norm": 1.3294904232025146, | |
| "learning_rate": 8.48932733489793e-06, | |
| "loss": 0.5478, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 0.6592, | |
| "grad_norm": 1.570875644683838, | |
| "learning_rate": 8.479313301115467e-06, | |
| "loss": 0.5596, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 0.6608, | |
| "grad_norm": 1.3935585021972656, | |
| "learning_rate": 8.469272134948963e-06, | |
| "loss": 0.5149, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 0.6624, | |
| "grad_norm": 1.2322958707809448, | |
| "learning_rate": 8.459203914701444e-06, | |
| "loss": 0.5587, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 0.664, | |
| "grad_norm": 1.400680661201477, | |
| "learning_rate": 8.449108718886919e-06, | |
| "loss": 0.5678, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 0.6656, | |
| "grad_norm": 1.8527023792266846, | |
| "learning_rate": 8.43898662622975e-06, | |
| "loss": 0.553, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 0.6672, | |
| "grad_norm": 1.6967326402664185, | |
| "learning_rate": 8.42883771566405e-06, | |
| "loss": 0.596, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 0.6688, | |
| "grad_norm": 1.9978325366973877, | |
| "learning_rate": 8.418662066333063e-06, | |
| "loss": 0.5471, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 0.6704, | |
| "grad_norm": 1.5405136346817017, | |
| "learning_rate": 8.408459757588547e-06, | |
| "loss": 0.5575, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 0.672, | |
| "grad_norm": 1.499711275100708, | |
| "learning_rate": 8.398230868990151e-06, | |
| "loss": 0.5275, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 0.6736, | |
| "grad_norm": 1.6597280502319336, | |
| "learning_rate": 8.387975480304808e-06, | |
| "loss": 0.5742, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 0.6752, | |
| "grad_norm": 1.6091039180755615, | |
| "learning_rate": 8.377693671506094e-06, | |
| "loss": 0.5704, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 0.6768, | |
| "grad_norm": 1.2908363342285156, | |
| "learning_rate": 8.367385522773625e-06, | |
| "loss": 0.5449, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 0.6784, | |
| "grad_norm": 1.4047667980194092, | |
| "learning_rate": 8.357051114492414e-06, | |
| "loss": 0.5478, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.5797241926193237, | |
| "learning_rate": 8.34669052725225e-06, | |
| "loss": 0.5909, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 0.6816, | |
| "grad_norm": 1.4079400300979614, | |
| "learning_rate": 8.336303841847073e-06, | |
| "loss": 0.5501, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 0.6832, | |
| "grad_norm": 1.3590975999832153, | |
| "learning_rate": 8.325891139274348e-06, | |
| "loss": 0.5831, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 0.6848, | |
| "grad_norm": 1.7556718587875366, | |
| "learning_rate": 8.315452500734415e-06, | |
| "loss": 0.5717, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 0.6864, | |
| "grad_norm": 1.5577335357666016, | |
| "learning_rate": 8.304988007629878e-06, | |
| "loss": 0.5645, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 0.688, | |
| "grad_norm": 1.3396000862121582, | |
| "learning_rate": 8.294497741564953e-06, | |
| "loss": 0.4707, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 0.6896, | |
| "grad_norm": 1.541326642036438, | |
| "learning_rate": 8.283981784344847e-06, | |
| "loss": 0.576, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 0.6912, | |
| "grad_norm": 1.542346477508545, | |
| "learning_rate": 8.273440217975103e-06, | |
| "loss": 0.5888, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 0.6928, | |
| "grad_norm": 1.4267380237579346, | |
| "learning_rate": 8.262873124660976e-06, | |
| "loss": 0.4841, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 0.6944, | |
| "grad_norm": 2.111485004425049, | |
| "learning_rate": 8.252280586806778e-06, | |
| "loss": 0.6012, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 0.696, | |
| "grad_norm": 1.5450475215911865, | |
| "learning_rate": 8.241662687015251e-06, | |
| "loss": 0.5215, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 0.6976, | |
| "grad_norm": 1.6567203998565674, | |
| "learning_rate": 8.231019508086908e-06, | |
| "loss": 0.5833, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 0.6992, | |
| "grad_norm": 1.5840221643447876, | |
| "learning_rate": 8.2203511330194e-06, | |
| "loss": 0.5661, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 0.7008, | |
| "grad_norm": 1.5503212213516235, | |
| "learning_rate": 8.209657645006854e-06, | |
| "loss": 0.5634, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 0.7024, | |
| "grad_norm": 1.3689537048339844, | |
| "learning_rate": 8.19893912743924e-06, | |
| "loss": 0.561, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 0.704, | |
| "grad_norm": 1.323249101638794, | |
| "learning_rate": 8.18819566390171e-06, | |
| "loss": 0.5377, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 0.7056, | |
| "grad_norm": 1.4463880062103271, | |
| "learning_rate": 8.177427338173955e-06, | |
| "loss": 0.5161, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 0.7072, | |
| "grad_norm": 1.4231421947479248, | |
| "learning_rate": 8.166634234229535e-06, | |
| "loss": 0.54, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 0.7088, | |
| "grad_norm": 1.4519294500350952, | |
| "learning_rate": 8.15581643623525e-06, | |
| "loss": 0.5383, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 0.7104, | |
| "grad_norm": 1.4137227535247803, | |
| "learning_rate": 8.144974028550456e-06, | |
| "loss": 0.5499, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 0.712, | |
| "grad_norm": 1.417708396911621, | |
| "learning_rate": 8.13410709572643e-06, | |
| "loss": 0.5712, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 0.7136, | |
| "grad_norm": 1.6497905254364014, | |
| "learning_rate": 8.123215722505695e-06, | |
| "loss": 0.5028, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 0.7152, | |
| "grad_norm": 1.422498106956482, | |
| "learning_rate": 8.112299993821366e-06, | |
| "loss": 0.5783, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 0.7168, | |
| "grad_norm": 1.3096020221710205, | |
| "learning_rate": 8.101359994796494e-06, | |
| "loss": 0.5212, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 0.7184, | |
| "grad_norm": 1.3320119380950928, | |
| "learning_rate": 8.090395810743382e-06, | |
| "loss": 0.4932, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.8773503303527832, | |
| "learning_rate": 8.079407527162944e-06, | |
| "loss": 0.6212, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 0.7216, | |
| "grad_norm": 1.7598968744277954, | |
| "learning_rate": 8.06839522974402e-06, | |
| "loss": 0.5165, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 0.7232, | |
| "grad_norm": 1.6123844385147095, | |
| "learning_rate": 8.057359004362719e-06, | |
| "loss": 0.542, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 0.7248, | |
| "grad_norm": 1.5737981796264648, | |
| "learning_rate": 8.046298937081742e-06, | |
| "loss": 0.4967, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 0.7264, | |
| "grad_norm": 1.3083248138427734, | |
| "learning_rate": 8.035215114149719e-06, | |
| "loss": 0.5272, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 0.728, | |
| "grad_norm": 1.6396276950836182, | |
| "learning_rate": 8.024107622000524e-06, | |
| "loss": 0.6017, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 0.7296, | |
| "grad_norm": 1.1681655645370483, | |
| "learning_rate": 8.012976547252614e-06, | |
| "loss": 0.5823, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 0.7312, | |
| "grad_norm": 1.5292519330978394, | |
| "learning_rate": 8.001821976708344e-06, | |
| "loss": 0.4578, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 0.7328, | |
| "grad_norm": 1.319521427154541, | |
| "learning_rate": 7.990643997353296e-06, | |
| "loss": 0.5627, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 0.7344, | |
| "grad_norm": 1.6441938877105713, | |
| "learning_rate": 7.979442696355601e-06, | |
| "loss": 0.5407, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 0.736, | |
| "grad_norm": 1.309098720550537, | |
| "learning_rate": 7.968218161065253e-06, | |
| "loss": 0.4951, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 0.7376, | |
| "grad_norm": 1.6334298849105835, | |
| "learning_rate": 7.956970479013433e-06, | |
| "loss": 0.5634, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 0.7392, | |
| "grad_norm": 1.2602206468582153, | |
| "learning_rate": 7.945699737911825e-06, | |
| "loss": 0.4869, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 0.7408, | |
| "grad_norm": 1.3879119157791138, | |
| "learning_rate": 7.93440602565193e-06, | |
| "loss": 0.5132, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 0.7424, | |
| "grad_norm": 1.6773675680160522, | |
| "learning_rate": 7.92308943030439e-06, | |
| "loss": 0.5557, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 0.744, | |
| "grad_norm": 1.3643486499786377, | |
| "learning_rate": 7.911750040118282e-06, | |
| "loss": 0.5432, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 0.7456, | |
| "grad_norm": 1.6675084829330444, | |
| "learning_rate": 7.900387943520453e-06, | |
| "loss": 0.5154, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 0.7472, | |
| "grad_norm": 1.6450365781784058, | |
| "learning_rate": 7.889003229114816e-06, | |
| "loss": 0.5713, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 0.7488, | |
| "grad_norm": 1.1581707000732422, | |
| "learning_rate": 7.877595985681656e-06, | |
| "loss": 0.5864, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 0.7504, | |
| "grad_norm": 1.6596333980560303, | |
| "learning_rate": 7.866166302176952e-06, | |
| "loss": 0.5053, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 0.752, | |
| "grad_norm": 1.2243446111679077, | |
| "learning_rate": 7.854714267731673e-06, | |
| "loss": 0.5514, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 0.7536, | |
| "grad_norm": 1.3545331954956055, | |
| "learning_rate": 7.84323997165108e-06, | |
| "loss": 0.5261, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 0.7552, | |
| "grad_norm": 1.4437392950057983, | |
| "learning_rate": 7.831743503414043e-06, | |
| "loss": 0.5066, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 0.7568, | |
| "grad_norm": 1.6438108682632446, | |
| "learning_rate": 7.820224952672329e-06, | |
| "loss": 0.5726, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 0.7584, | |
| "grad_norm": 1.470716953277588, | |
| "learning_rate": 7.80868440924991e-06, | |
| "loss": 0.4925, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.3645358085632324, | |
| "learning_rate": 7.797121963142263e-06, | |
| "loss": 0.5493, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 0.7616, | |
| "grad_norm": 1.4535894393920898, | |
| "learning_rate": 7.785537704515662e-06, | |
| "loss": 0.5479, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 0.7632, | |
| "grad_norm": 1.6181972026824951, | |
| "learning_rate": 7.773931723706487e-06, | |
| "loss": 0.5568, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 0.7648, | |
| "grad_norm": 1.4290663003921509, | |
| "learning_rate": 7.762304111220506e-06, | |
| "loss": 0.4873, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 0.7664, | |
| "grad_norm": 1.4261584281921387, | |
| "learning_rate": 7.750654957732179e-06, | |
| "loss": 0.5125, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 0.768, | |
| "grad_norm": 1.1970328092575073, | |
| "learning_rate": 7.738984354083942e-06, | |
| "loss": 0.4793, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 0.7696, | |
| "grad_norm": 1.2892889976501465, | |
| "learning_rate": 7.727292391285507e-06, | |
| "loss": 0.5154, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 0.7712, | |
| "grad_norm": 1.5146937370300293, | |
| "learning_rate": 7.715579160513152e-06, | |
| "loss": 0.5284, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 0.7728, | |
| "grad_norm": 1.5794085264205933, | |
| "learning_rate": 7.703844753108997e-06, | |
| "loss": 0.5124, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 0.7744, | |
| "grad_norm": 1.302384853363037, | |
| "learning_rate": 7.692089260580315e-06, | |
| "loss": 0.5333, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 0.776, | |
| "grad_norm": 1.4886388778686523, | |
| "learning_rate": 7.680312774598794e-06, | |
| "loss": 0.5718, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 0.7776, | |
| "grad_norm": 1.4579716920852661, | |
| "learning_rate": 7.668515386999837e-06, | |
| "loss": 0.5792, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 0.7792, | |
| "grad_norm": 1.3708596229553223, | |
| "learning_rate": 7.656697189781846e-06, | |
| "loss": 0.5723, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 0.7808, | |
| "grad_norm": 1.5587267875671387, | |
| "learning_rate": 7.644858275105494e-06, | |
| "loss": 0.5315, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 0.7824, | |
| "grad_norm": 1.6092159748077393, | |
| "learning_rate": 7.632998735293016e-06, | |
| "loss": 0.5216, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 0.784, | |
| "grad_norm": 1.511612892150879, | |
| "learning_rate": 7.621118662827487e-06, | |
| "loss": 0.5293, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 0.7856, | |
| "grad_norm": 1.7856494188308716, | |
| "learning_rate": 7.609218150352098e-06, | |
| "loss": 0.6262, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 0.7872, | |
| "grad_norm": 1.4585975408554077, | |
| "learning_rate": 7.597297290669437e-06, | |
| "loss": 0.4742, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 0.7888, | |
| "grad_norm": 1.3418307304382324, | |
| "learning_rate": 7.585356176740759e-06, | |
| "loss": 0.5299, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 0.7904, | |
| "grad_norm": 1.3561956882476807, | |
| "learning_rate": 7.573394901685271e-06, | |
| "loss": 0.4855, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 0.792, | |
| "grad_norm": 1.2393639087677002, | |
| "learning_rate": 7.561413558779401e-06, | |
| "loss": 0.5847, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 0.7936, | |
| "grad_norm": 1.4676223993301392, | |
| "learning_rate": 7.5494122414560645e-06, | |
| "loss": 0.5374, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 0.7952, | |
| "grad_norm": 1.2547037601470947, | |
| "learning_rate": 7.537391043303947e-06, | |
| "loss": 0.4984, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 0.7968, | |
| "grad_norm": 1.5870437622070312, | |
| "learning_rate": 7.525350058066765e-06, | |
| "loss": 0.532, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 0.7984, | |
| "grad_norm": 1.6154413223266602, | |
| "learning_rate": 7.513289379642541e-06, | |
| "loss": 0.5845, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.4688035249710083, | |
| "learning_rate": 7.501209102082867e-06, | |
| "loss": 0.4814, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 0.8016, | |
| "grad_norm": 1.6249189376831055, | |
| "learning_rate": 7.4891093195921764e-06, | |
| "loss": 0.6198, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 0.8032, | |
| "grad_norm": 1.5225543975830078, | |
| "learning_rate": 7.476990126527e-06, | |
| "loss": 0.5004, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 0.8048, | |
| "grad_norm": 1.7540644407272339, | |
| "learning_rate": 7.464851617395244e-06, | |
| "loss": 0.5475, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 0.8064, | |
| "grad_norm": 1.5555365085601807, | |
| "learning_rate": 7.452693886855438e-06, | |
| "loss": 0.489, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 0.808, | |
| "grad_norm": 1.4354197978973389, | |
| "learning_rate": 7.440517029716008e-06, | |
| "loss": 0.566, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 0.8096, | |
| "grad_norm": 1.1562589406967163, | |
| "learning_rate": 7.428321140934532e-06, | |
| "loss": 0.5036, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 0.8112, | |
| "grad_norm": 1.3230618238449097, | |
| "learning_rate": 7.416106315617e-06, | |
| "loss": 0.5499, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 0.8128, | |
| "grad_norm": 1.5580434799194336, | |
| "learning_rate": 7.403872649017074e-06, | |
| "loss": 0.5576, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 0.8144, | |
| "grad_norm": 1.3257368803024292, | |
| "learning_rate": 7.391620236535345e-06, | |
| "loss": 0.5825, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 0.816, | |
| "grad_norm": 1.5289413928985596, | |
| "learning_rate": 7.379349173718585e-06, | |
| "loss": 0.5159, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 0.8176, | |
| "grad_norm": 1.5701593160629272, | |
| "learning_rate": 7.367059556259008e-06, | |
| "loss": 0.5666, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 0.8192, | |
| "grad_norm": 1.6312825679779053, | |
| "learning_rate": 7.354751479993518e-06, | |
| "loss": 0.5231, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 0.8208, | |
| "grad_norm": 1.323542833328247, | |
| "learning_rate": 7.342425040902967e-06, | |
| "loss": 0.5675, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 0.8224, | |
| "grad_norm": 1.4468830823898315, | |
| "learning_rate": 7.330080335111405e-06, | |
| "loss": 0.536, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 0.824, | |
| "grad_norm": 1.5159915685653687, | |
| "learning_rate": 7.317717458885324e-06, | |
| "loss": 0.5629, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 0.8256, | |
| "grad_norm": 1.2929723262786865, | |
| "learning_rate": 7.30533650863292e-06, | |
| "loss": 0.4625, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 0.8272, | |
| "grad_norm": 1.4054374694824219, | |
| "learning_rate": 7.292937580903326e-06, | |
| "loss": 0.5272, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 0.8288, | |
| "grad_norm": 1.334098219871521, | |
| "learning_rate": 7.280520772385875e-06, | |
| "loss": 0.5366, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 0.8304, | |
| "grad_norm": 1.4377784729003906, | |
| "learning_rate": 7.268086179909331e-06, | |
| "loss": 0.5527, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 0.832, | |
| "grad_norm": 1.4365525245666504, | |
| "learning_rate": 7.255633900441147e-06, | |
| "loss": 0.5986, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 0.8336, | |
| "grad_norm": 1.108258843421936, | |
| "learning_rate": 7.243164031086697e-06, | |
| "loss": 0.5425, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 0.8352, | |
| "grad_norm": 1.5322009325027466, | |
| "learning_rate": 7.23067666908853e-06, | |
| "loss": 0.5798, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 0.8368, | |
| "grad_norm": 1.336838722229004, | |
| "learning_rate": 7.2181719118256e-06, | |
| "loss": 0.5158, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 0.8384, | |
| "grad_norm": 1.229859709739685, | |
| "learning_rate": 7.205649856812519e-06, | |
| "loss": 0.4978, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.6400887966156006, | |
| "learning_rate": 7.193110601698785e-06, | |
| "loss": 0.5258, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 0.8416, | |
| "grad_norm": 1.5071361064910889, | |
| "learning_rate": 7.18055424426803e-06, | |
| "loss": 0.5133, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 0.8432, | |
| "grad_norm": 1.5416686534881592, | |
| "learning_rate": 7.167980882437251e-06, | |
| "loss": 0.5335, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 0.8448, | |
| "grad_norm": 1.2770659923553467, | |
| "learning_rate": 7.155390614256048e-06, | |
| "loss": 0.5034, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 0.8464, | |
| "grad_norm": 1.2937873601913452, | |
| "learning_rate": 7.142783537905864e-06, | |
| "loss": 0.5127, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 0.848, | |
| "grad_norm": 1.8700532913208008, | |
| "learning_rate": 7.130159751699211e-06, | |
| "loss": 0.5974, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 0.8496, | |
| "grad_norm": 1.4665712118148804, | |
| "learning_rate": 7.11751935407891e-06, | |
| "loss": 0.5358, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 0.8512, | |
| "grad_norm": 1.259859323501587, | |
| "learning_rate": 7.104862443617322e-06, | |
| "loss": 0.4898, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 0.8528, | |
| "grad_norm": 1.6324716806411743, | |
| "learning_rate": 7.092189119015575e-06, | |
| "loss": 0.5102, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 0.8544, | |
| "grad_norm": 1.9136959314346313, | |
| "learning_rate": 7.079499479102802e-06, | |
| "loss": 0.5624, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 0.856, | |
| "grad_norm": 1.4565120935440063, | |
| "learning_rate": 7.066793622835364e-06, | |
| "loss": 0.5233, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 0.8576, | |
| "grad_norm": 1.4126449823379517, | |
| "learning_rate": 7.054071649296078e-06, | |
| "loss": 0.5039, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 0.8592, | |
| "grad_norm": 1.7465589046478271, | |
| "learning_rate": 7.041333657693452e-06, | |
| "loss": 0.5077, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 0.8608, | |
| "grad_norm": 1.477338194847107, | |
| "learning_rate": 7.028579747360903e-06, | |
| "loss": 0.5235, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 0.8624, | |
| "grad_norm": 1.7635746002197266, | |
| "learning_rate": 7.015810017755985e-06, | |
| "loss": 0.588, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 0.864, | |
| "grad_norm": 1.5359976291656494, | |
| "learning_rate": 7.003024568459614e-06, | |
| "loss": 0.5576, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 0.8656, | |
| "grad_norm": 1.4867557287216187, | |
| "learning_rate": 6.9902234991752945e-06, | |
| "loss": 0.516, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 0.8672, | |
| "grad_norm": 1.4478759765625, | |
| "learning_rate": 6.977406909728335e-06, | |
| "loss": 0.583, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 0.8688, | |
| "grad_norm": 1.6378670930862427, | |
| "learning_rate": 6.964574900065072e-06, | |
| "loss": 0.5006, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 0.8704, | |
| "grad_norm": 1.513391137123108, | |
| "learning_rate": 6.9517275702521e-06, | |
| "loss": 0.5252, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 0.872, | |
| "grad_norm": 1.595704436302185, | |
| "learning_rate": 6.938865020475471e-06, | |
| "loss": 0.5046, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 0.8736, | |
| "grad_norm": 1.7334957122802734, | |
| "learning_rate": 6.925987351039936e-06, | |
| "loss": 0.4936, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 0.8752, | |
| "grad_norm": 1.7050503492355347, | |
| "learning_rate": 6.913094662368147e-06, | |
| "loss": 0.5965, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 0.8768, | |
| "grad_norm": 1.6849944591522217, | |
| "learning_rate": 6.900187054999883e-06, | |
| "loss": 0.6515, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 0.8784, | |
| "grad_norm": 1.8494666814804077, | |
| "learning_rate": 6.887264629591254e-06, | |
| "loss": 0.5197, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.3329585790634155, | |
| "learning_rate": 6.874327486913933e-06, | |
| "loss": 0.5013, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 0.8816, | |
| "grad_norm": 1.3797527551651, | |
| "learning_rate": 6.861375727854356e-06, | |
| "loss": 0.5276, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 0.8832, | |
| "grad_norm": 1.409279704093933, | |
| "learning_rate": 6.848409453412943e-06, | |
| "loss": 0.5186, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 0.8848, | |
| "grad_norm": 1.3078670501708984, | |
| "learning_rate": 6.8354287647033046e-06, | |
| "loss": 0.5059, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 0.8864, | |
| "grad_norm": 1.550347089767456, | |
| "learning_rate": 6.8224337629514615e-06, | |
| "loss": 0.4994, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 0.888, | |
| "grad_norm": 1.7249889373779297, | |
| "learning_rate": 6.809424549495045e-06, | |
| "loss": 0.5668, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 0.8896, | |
| "grad_norm": 1.4597032070159912, | |
| "learning_rate": 6.796401225782517e-06, | |
| "loss": 0.5338, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 0.8912, | |
| "grad_norm": 1.746682047843933, | |
| "learning_rate": 6.783363893372372e-06, | |
| "loss": 0.6004, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 0.8928, | |
| "grad_norm": 1.385525107383728, | |
| "learning_rate": 6.770312653932346e-06, | |
| "loss": 0.5401, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 0.8944, | |
| "grad_norm": 1.465954065322876, | |
| "learning_rate": 6.757247609238625e-06, | |
| "loss": 0.5705, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 0.896, | |
| "grad_norm": 1.305722713470459, | |
| "learning_rate": 6.744168861175056e-06, | |
| "loss": 0.5149, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 0.8976, | |
| "grad_norm": 1.381870150566101, | |
| "learning_rate": 6.731076511732338e-06, | |
| "loss": 0.5239, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 0.8992, | |
| "grad_norm": 1.7610459327697754, | |
| "learning_rate": 6.717970663007245e-06, | |
| "loss": 0.506, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 0.9008, | |
| "grad_norm": 1.9110548496246338, | |
| "learning_rate": 6.704851417201821e-06, | |
| "loss": 0.5244, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 0.9024, | |
| "grad_norm": 1.5605720281600952, | |
| "learning_rate": 6.6917188766225736e-06, | |
| "loss": 0.585, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 0.904, | |
| "grad_norm": 1.4022600650787354, | |
| "learning_rate": 6.678573143679696e-06, | |
| "loss": 0.4975, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 0.9056, | |
| "grad_norm": 1.597119927406311, | |
| "learning_rate": 6.665414320886256e-06, | |
| "loss": 0.585, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 0.9072, | |
| "grad_norm": 1.4175268411636353, | |
| "learning_rate": 6.652242510857395e-06, | |
| "loss": 0.5484, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 0.9088, | |
| "grad_norm": 1.6243127584457397, | |
| "learning_rate": 6.639057816309532e-06, | |
| "loss": 0.5521, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 0.9104, | |
| "grad_norm": 1.4473142623901367, | |
| "learning_rate": 6.625860340059567e-06, | |
| "loss": 0.5489, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 0.912, | |
| "grad_norm": 1.7066948413848877, | |
| "learning_rate": 6.612650185024068e-06, | |
| "loss": 0.5382, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 0.9136, | |
| "grad_norm": 1.2897255420684814, | |
| "learning_rate": 6.599427454218479e-06, | |
| "loss": 0.5105, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 0.9152, | |
| "grad_norm": 1.4342617988586426, | |
| "learning_rate": 6.586192250756312e-06, | |
| "loss": 0.5034, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 0.9168, | |
| "grad_norm": 1.6727919578552246, | |
| "learning_rate": 6.5729446778483395e-06, | |
| "loss": 0.5894, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 0.9184, | |
| "grad_norm": 1.6118981838226318, | |
| "learning_rate": 6.559684838801798e-06, | |
| "loss": 0.5427, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.4792639017105103, | |
| "learning_rate": 6.546412837019577e-06, | |
| "loss": 0.5578, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 0.9216, | |
| "grad_norm": 1.352918028831482, | |
| "learning_rate": 6.533128775999411e-06, | |
| "loss": 0.5292, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 0.9232, | |
| "grad_norm": 1.3862431049346924, | |
| "learning_rate": 6.519832759333076e-06, | |
| "loss": 0.5033, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 0.9248, | |
| "grad_norm": 1.6580413579940796, | |
| "learning_rate": 6.506524890705581e-06, | |
| "loss": 0.5289, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 0.9264, | |
| "grad_norm": 1.1765118837356567, | |
| "learning_rate": 6.493205273894361e-06, | |
| "loss": 0.4882, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 0.928, | |
| "grad_norm": 1.3686548471450806, | |
| "learning_rate": 6.479874012768459e-06, | |
| "loss": 0.548, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 0.9296, | |
| "grad_norm": 1.4441702365875244, | |
| "learning_rate": 6.4665312112877325e-06, | |
| "loss": 0.6003, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 0.9312, | |
| "grad_norm": 1.671318769454956, | |
| "learning_rate": 6.453176973502024e-06, | |
| "loss": 0.5613, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 0.9328, | |
| "grad_norm": 1.3213675022125244, | |
| "learning_rate": 6.4398114035503644e-06, | |
| "loss": 0.4878, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 0.9344, | |
| "grad_norm": 1.2262921333312988, | |
| "learning_rate": 6.426434605660151e-06, | |
| "loss": 0.5642, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 0.936, | |
| "grad_norm": 1.4129204750061035, | |
| "learning_rate": 6.413046684146343e-06, | |
| "loss": 0.5354, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 0.9376, | |
| "grad_norm": 1.6236296892166138, | |
| "learning_rate": 6.3996477434106405e-06, | |
| "loss": 0.6059, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 0.9392, | |
| "grad_norm": 1.4231986999511719, | |
| "learning_rate": 6.3862378879406765e-06, | |
| "loss": 0.5073, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 0.9408, | |
| "grad_norm": 1.6488242149353027, | |
| "learning_rate": 6.372817222309194e-06, | |
| "loss": 0.5145, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 0.9424, | |
| "grad_norm": 1.1615115404129028, | |
| "learning_rate": 6.3593858511732446e-06, | |
| "loss": 0.5126, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 0.944, | |
| "grad_norm": 1.631441593170166, | |
| "learning_rate": 6.345943879273353e-06, | |
| "loss": 0.5684, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 0.9456, | |
| "grad_norm": 1.3466230630874634, | |
| "learning_rate": 6.3324914114327206e-06, | |
| "loss": 0.5119, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 0.9472, | |
| "grad_norm": 1.5755928754806519, | |
| "learning_rate": 6.319028552556393e-06, | |
| "loss": 0.5469, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 0.9488, | |
| "grad_norm": 1.829716682434082, | |
| "learning_rate": 6.305555407630447e-06, | |
| "loss": 0.5291, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 0.9504, | |
| "grad_norm": 1.2437143325805664, | |
| "learning_rate": 6.292072081721173e-06, | |
| "loss": 0.4769, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 0.952, | |
| "grad_norm": 1.4398503303527832, | |
| "learning_rate": 6.278578679974259e-06, | |
| "loss": 0.5463, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 0.9536, | |
| "grad_norm": 2.08911395072937, | |
| "learning_rate": 6.265075307613956e-06, | |
| "loss": 0.4973, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 0.9552, | |
| "grad_norm": 1.2867152690887451, | |
| "learning_rate": 6.2515620699422775e-06, | |
| "loss": 0.5478, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 0.9568, | |
| "grad_norm": 1.9239723682403564, | |
| "learning_rate": 6.2380390723381666e-06, | |
| "loss": 0.6072, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 0.9584, | |
| "grad_norm": 1.7422161102294922, | |
| "learning_rate": 6.224506420256673e-06, | |
| "loss": 0.5279, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.52890944480896, | |
| "learning_rate": 6.210964219228135e-06, | |
| "loss": 0.5473, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 0.9616, | |
| "grad_norm": 1.5165317058563232, | |
| "learning_rate": 6.197412574857361e-06, | |
| "loss": 0.5523, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 0.9632, | |
| "grad_norm": 1.4884533882141113, | |
| "learning_rate": 6.1838515928227925e-06, | |
| "loss": 0.553, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 0.9648, | |
| "grad_norm": 1.6735478639602661, | |
| "learning_rate": 6.170281378875692e-06, | |
| "loss": 0.5421, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 0.9664, | |
| "grad_norm": 1.38810396194458, | |
| "learning_rate": 6.1567020388393155e-06, | |
| "loss": 0.5192, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 0.968, | |
| "grad_norm": 1.4100205898284912, | |
| "learning_rate": 6.143113678608081e-06, | |
| "loss": 0.5338, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 0.9696, | |
| "grad_norm": 1.4732227325439453, | |
| "learning_rate": 6.1295164041467545e-06, | |
| "loss": 0.4925, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 0.9712, | |
| "grad_norm": 1.4747562408447266, | |
| "learning_rate": 6.115910321489613e-06, | |
| "loss": 0.508, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 0.9728, | |
| "grad_norm": 1.7133896350860596, | |
| "learning_rate": 6.102295536739622e-06, | |
| "loss": 0.5157, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 0.9744, | |
| "grad_norm": 1.309938907623291, | |
| "learning_rate": 6.088672156067607e-06, | |
| "loss": 0.5703, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 0.976, | |
| "grad_norm": 1.614957332611084, | |
| "learning_rate": 6.075040285711427e-06, | |
| "loss": 0.5469, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 0.9776, | |
| "grad_norm": 1.4487053155899048, | |
| "learning_rate": 6.061400031975147e-06, | |
| "loss": 0.56, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 0.9792, | |
| "grad_norm": 1.3841825723648071, | |
| "learning_rate": 6.047751501228203e-06, | |
| "loss": 0.5176, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 0.9808, | |
| "grad_norm": 1.5188674926757812, | |
| "learning_rate": 6.034094799904583e-06, | |
| "loss": 0.4936, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 0.9824, | |
| "grad_norm": 1.1297731399536133, | |
| "learning_rate": 6.020430034501986e-06, | |
| "loss": 0.5166, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 0.984, | |
| "grad_norm": 1.3134491443634033, | |
| "learning_rate": 6.0067573115809965e-06, | |
| "loss": 0.5366, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 0.9856, | |
| "grad_norm": 1.3343524932861328, | |
| "learning_rate": 5.993076737764254e-06, | |
| "loss": 0.4959, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 0.9872, | |
| "grad_norm": 2.18261456489563, | |
| "learning_rate": 5.979388419735625e-06, | |
| "loss": 0.5693, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 0.9888, | |
| "grad_norm": 1.3148341178894043, | |
| "learning_rate": 5.965692464239358e-06, | |
| "loss": 0.5225, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 0.9904, | |
| "grad_norm": 1.3353829383850098, | |
| "learning_rate": 5.951988978079268e-06, | |
| "loss": 0.5913, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 0.992, | |
| "grad_norm": 1.425750970840454, | |
| "learning_rate": 5.9382780681178935e-06, | |
| "loss": 0.5503, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 0.9936, | |
| "grad_norm": 1.6471588611602783, | |
| "learning_rate": 5.924559841275661e-06, | |
| "loss": 0.5446, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 0.9952, | |
| "grad_norm": 1.5088058710098267, | |
| "learning_rate": 5.910834404530064e-06, | |
| "loss": 0.4901, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 0.9968, | |
| "grad_norm": 1.5489749908447266, | |
| "learning_rate": 5.897101864914814e-06, | |
| "loss": 0.5127, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 0.9984, | |
| "grad_norm": 1.836721658706665, | |
| "learning_rate": 5.8833623295190104e-06, | |
| "loss": 0.5563, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.5311901569366455, | |
| "learning_rate": 5.869615905486313e-06, | |
| "loss": 0.5816, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 1.0016, | |
| "grad_norm": 1.5278863906860352, | |
| "learning_rate": 5.855862700014096e-06, | |
| "loss": 0.4033, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 1.0032, | |
| "grad_norm": 1.4977178573608398, | |
| "learning_rate": 5.842102820352623e-06, | |
| "loss": 0.3912, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 1.0048, | |
| "grad_norm": 1.4251818656921387, | |
| "learning_rate": 5.8283363738041945e-06, | |
| "loss": 0.4479, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 1.0064, | |
| "grad_norm": 1.7043424844741821, | |
| "learning_rate": 5.814563467722328e-06, | |
| "loss": 0.4476, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 1.008, | |
| "grad_norm": 1.5264893770217896, | |
| "learning_rate": 5.80078420951091e-06, | |
| "loss": 0.3561, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 1.0096, | |
| "grad_norm": 1.4412972927093506, | |
| "learning_rate": 5.786998706623365e-06, | |
| "loss": 0.4003, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 1.0112, | |
| "grad_norm": 2.0022106170654297, | |
| "learning_rate": 5.773207066561817e-06, | |
| "loss": 0.4251, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 1.0128, | |
| "grad_norm": 1.3563566207885742, | |
| "learning_rate": 5.759409396876242e-06, | |
| "loss": 0.3939, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 1.0144, | |
| "grad_norm": 1.8401267528533936, | |
| "learning_rate": 5.745605805163641e-06, | |
| "loss": 0.4057, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 1.016, | |
| "grad_norm": 1.7037734985351562, | |
| "learning_rate": 5.731796399067194e-06, | |
| "loss": 0.3767, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 1.0176, | |
| "grad_norm": 1.807308554649353, | |
| "learning_rate": 5.7179812862754265e-06, | |
| "loss": 0.4285, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 1.0192, | |
| "grad_norm": 1.44683837890625, | |
| "learning_rate": 5.7041605745213605e-06, | |
| "loss": 0.4206, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 1.0208, | |
| "grad_norm": 1.5524386167526245, | |
| "learning_rate": 5.690334371581683e-06, | |
| "loss": 0.4464, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 1.0224, | |
| "grad_norm": 1.2644178867340088, | |
| "learning_rate": 5.6765027852759015e-06, | |
| "loss": 0.4221, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 1.024, | |
| "grad_norm": 1.4757063388824463, | |
| "learning_rate": 5.662665923465508e-06, | |
| "loss": 0.3773, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 1.0256, | |
| "grad_norm": 1.49785315990448, | |
| "learning_rate": 5.6488238940531256e-06, | |
| "loss": 0.363, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 1.0272, | |
| "grad_norm": 1.5532892942428589, | |
| "learning_rate": 5.634976804981682e-06, | |
| "loss": 0.3861, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 1.0288, | |
| "grad_norm": 1.5950244665145874, | |
| "learning_rate": 5.621124764233561e-06, | |
| "loss": 0.3881, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 1.0304, | |
| "grad_norm": 1.182177186012268, | |
| "learning_rate": 5.607267879829757e-06, | |
| "loss": 0.384, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 1.032, | |
| "grad_norm": 1.4916887283325195, | |
| "learning_rate": 5.593406259829038e-06, | |
| "loss": 0.387, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 1.0336, | |
| "grad_norm": 1.5078849792480469, | |
| "learning_rate": 5.579540012327103e-06, | |
| "loss": 0.3942, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 1.0352, | |
| "grad_norm": 1.3654046058654785, | |
| "learning_rate": 5.565669245455735e-06, | |
| "loss": 0.4137, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 1.0368, | |
| "grad_norm": 1.5723360776901245, | |
| "learning_rate": 5.551794067381959e-06, | |
| "loss": 0.3949, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 1.0384, | |
| "grad_norm": 1.4671664237976074, | |
| "learning_rate": 5.537914586307204e-06, | |
| "loss": 0.3641, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 1.04, | |
| "grad_norm": 1.9938820600509644, | |
| "learning_rate": 5.524030910466447e-06, | |
| "loss": 0.404, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 1.0416, | |
| "grad_norm": 1.482224464416504, | |
| "learning_rate": 5.510143148127384e-06, | |
| "loss": 0.3956, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 1.0432, | |
| "grad_norm": 1.8510936498641968, | |
| "learning_rate": 5.4962514075895746e-06, | |
| "loss": 0.3955, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 1.0448, | |
| "grad_norm": 1.3378214836120605, | |
| "learning_rate": 5.482355797183602e-06, | |
| "loss": 0.3583, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 1.0464, | |
| "grad_norm": 1.4335100650787354, | |
| "learning_rate": 5.468456425270229e-06, | |
| "loss": 0.4571, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 1.048, | |
| "grad_norm": 1.85202956199646, | |
| "learning_rate": 5.454553400239548e-06, | |
| "loss": 0.3948, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 1.0496, | |
| "grad_norm": 1.6900779008865356, | |
| "learning_rate": 5.440646830510142e-06, | |
| "loss": 0.3937, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 1.0512, | |
| "grad_norm": 1.4823509454727173, | |
| "learning_rate": 5.426736824528236e-06, | |
| "loss": 0.3756, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 1.0528, | |
| "grad_norm": 1.4467169046401978, | |
| "learning_rate": 5.412823490766849e-06, | |
| "loss": 0.3807, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 1.0544, | |
| "grad_norm": 1.4401826858520508, | |
| "learning_rate": 5.398906937724954e-06, | |
| "loss": 0.3856, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 1.056, | |
| "grad_norm": 1.5513033866882324, | |
| "learning_rate": 5.384987273926625e-06, | |
| "loss": 0.4103, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 1.0576, | |
| "grad_norm": 1.3110318183898926, | |
| "learning_rate": 5.3710646079202e-06, | |
| "loss": 0.3977, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 1.0592, | |
| "grad_norm": 1.6953034400939941, | |
| "learning_rate": 5.357139048277422e-06, | |
| "loss": 0.3988, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 1.0608, | |
| "grad_norm": 2.0533010959625244, | |
| "learning_rate": 5.343210703592604e-06, | |
| "loss": 0.4129, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 1.0624, | |
| "grad_norm": 1.579426884651184, | |
| "learning_rate": 5.329279682481776e-06, | |
| "loss": 0.4507, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 1.064, | |
| "grad_norm": 2.018475294113159, | |
| "learning_rate": 5.3153460935818405e-06, | |
| "loss": 0.3918, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 1.0656, | |
| "grad_norm": 1.6805697679519653, | |
| "learning_rate": 5.301410045549719e-06, | |
| "loss": 0.3959, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 1.0672, | |
| "grad_norm": 1.7774490118026733, | |
| "learning_rate": 5.287471647061515e-06, | |
| "loss": 0.3967, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 1.0688, | |
| "grad_norm": 1.4479763507843018, | |
| "learning_rate": 5.2735310068116605e-06, | |
| "loss": 0.3809, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 1.0704, | |
| "grad_norm": 1.5567115545272827, | |
| "learning_rate": 5.25958823351207e-06, | |
| "loss": 0.3624, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 1.072, | |
| "grad_norm": 1.5630004405975342, | |
| "learning_rate": 5.2456434358912865e-06, | |
| "loss": 0.3829, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 1.0735999999999999, | |
| "grad_norm": 2.1382017135620117, | |
| "learning_rate": 5.2316967226936454e-06, | |
| "loss": 0.3991, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 1.0752, | |
| "grad_norm": 1.3570510149002075, | |
| "learning_rate": 5.21774820267842e-06, | |
| "loss": 0.3874, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 1.0768, | |
| "grad_norm": 1.4608575105667114, | |
| "learning_rate": 5.2037979846189655e-06, | |
| "loss": 0.3264, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 1.0784, | |
| "grad_norm": 1.7318300008773804, | |
| "learning_rate": 5.189846177301892e-06, | |
| "loss": 0.3789, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 1.08, | |
| "grad_norm": 1.5518624782562256, | |
| "learning_rate": 5.175892889526189e-06, | |
| "loss": 0.3965, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 1.0816, | |
| "grad_norm": 1.37949800491333, | |
| "learning_rate": 5.1619382301024025e-06, | |
| "loss": 0.3874, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 1.0832, | |
| "grad_norm": 1.4376988410949707, | |
| "learning_rate": 5.147982307851766e-06, | |
| "loss": 0.3885, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 1.0848, | |
| "grad_norm": 1.5301703214645386, | |
| "learning_rate": 5.1340252316053686e-06, | |
| "loss": 0.394, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 1.0864, | |
| "grad_norm": 1.494070291519165, | |
| "learning_rate": 5.120067110203289e-06, | |
| "loss": 0.3776, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 1.088, | |
| "grad_norm": 1.757067322731018, | |
| "learning_rate": 5.106108052493768e-06, | |
| "loss": 0.4614, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 1.0896, | |
| "grad_norm": 1.7171550989151, | |
| "learning_rate": 5.092148167332338e-06, | |
| "loss": 0.3886, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 1.0912, | |
| "grad_norm": 1.5254855155944824, | |
| "learning_rate": 5.078187563580988e-06, | |
| "loss": 0.3585, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 1.0928, | |
| "grad_norm": 1.4783705472946167, | |
| "learning_rate": 5.0642263501073096e-06, | |
| "loss": 0.4069, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 1.0944, | |
| "grad_norm": 1.6454366445541382, | |
| "learning_rate": 5.050264635783654e-06, | |
| "loss": 0.3567, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 1.096, | |
| "grad_norm": 1.437853455543518, | |
| "learning_rate": 5.03630252948627e-06, | |
| "loss": 0.418, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 1.0976, | |
| "grad_norm": 1.7114819288253784, | |
| "learning_rate": 5.022340140094469e-06, | |
| "loss": 0.4016, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 1.0992, | |
| "grad_norm": 1.3523861169815063, | |
| "learning_rate": 5.008377576489769e-06, | |
| "loss": 0.3856, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 1.1008, | |
| "grad_norm": 1.635858178138733, | |
| "learning_rate": 4.994414947555043e-06, | |
| "loss": 0.3695, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 1.1024, | |
| "grad_norm": 1.4207016229629517, | |
| "learning_rate": 4.980452362173676e-06, | |
| "loss": 0.3725, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 1.104, | |
| "grad_norm": 1.388056755065918, | |
| "learning_rate": 4.966489929228721e-06, | |
| "loss": 0.3848, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 1.1056, | |
| "grad_norm": 1.8015445470809937, | |
| "learning_rate": 4.952527757602025e-06, | |
| "loss": 0.4117, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 1.1072, | |
| "grad_norm": 1.9705373048782349, | |
| "learning_rate": 4.938565956173413e-06, | |
| "loss": 0.3946, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 1.1088, | |
| "grad_norm": 1.8069337606430054, | |
| "learning_rate": 4.924604633819815e-06, | |
| "loss": 0.3605, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 1.1104, | |
| "grad_norm": 1.3212165832519531, | |
| "learning_rate": 4.910643899414429e-06, | |
| "loss": 0.3576, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 1.112, | |
| "grad_norm": 1.6028156280517578, | |
| "learning_rate": 4.896683861825863e-06, | |
| "loss": 0.4003, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 1.1136, | |
| "grad_norm": 2.245041847229004, | |
| "learning_rate": 4.882724629917298e-06, | |
| "loss": 0.4175, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 1.1152, | |
| "grad_norm": 1.6626189947128296, | |
| "learning_rate": 4.868766312545627e-06, | |
| "loss": 0.3948, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 1.1168, | |
| "grad_norm": 1.6258718967437744, | |
| "learning_rate": 4.854809018560611e-06, | |
| "loss": 0.4019, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 1.1184, | |
| "grad_norm": 1.3411015272140503, | |
| "learning_rate": 4.8408528568040365e-06, | |
| "loss": 0.4035, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 1.12, | |
| "grad_norm": 1.4541879892349243, | |
| "learning_rate": 4.826897936108853e-06, | |
| "loss": 0.3749, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 1.1216, | |
| "grad_norm": 1.7652925252914429, | |
| "learning_rate": 4.812944365298337e-06, | |
| "loss": 0.425, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 1.1232, | |
| "grad_norm": 1.4005472660064697, | |
| "learning_rate": 4.798992253185233e-06, | |
| "loss": 0.3989, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 1.1248, | |
| "grad_norm": 1.5522427558898926, | |
| "learning_rate": 4.785041708570921e-06, | |
| "loss": 0.3766, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 1.1264, | |
| "grad_norm": 1.5371007919311523, | |
| "learning_rate": 4.771092840244544e-06, | |
| "loss": 0.4063, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 1.1280000000000001, | |
| "grad_norm": 1.7128517627716064, | |
| "learning_rate": 4.757145756982182e-06, | |
| "loss": 0.3946, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 1.1296, | |
| "grad_norm": 1.8473058938980103, | |
| "learning_rate": 4.7432005675459905e-06, | |
| "loss": 0.3914, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 1.1312, | |
| "grad_norm": 1.8506437540054321, | |
| "learning_rate": 4.7292573806833605e-06, | |
| "loss": 0.4061, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 1.1328, | |
| "grad_norm": 1.8255748748779297, | |
| "learning_rate": 4.715316305126059e-06, | |
| "loss": 0.3865, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 1.1344, | |
| "grad_norm": 1.5763165950775146, | |
| "learning_rate": 4.7013774495894e-06, | |
| "loss": 0.3665, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 1.1360000000000001, | |
| "grad_norm": 1.1992326974868774, | |
| "learning_rate": 4.687440922771376e-06, | |
| "loss": 0.4032, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 1.1376, | |
| "grad_norm": 1.7682344913482666, | |
| "learning_rate": 4.673506833351821e-06, | |
| "loss": 0.4186, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 1.1392, | |
| "grad_norm": 1.4362800121307373, | |
| "learning_rate": 4.659575289991567e-06, | |
| "loss": 0.4006, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 1.1408, | |
| "grad_norm": 1.6189665794372559, | |
| "learning_rate": 4.645646401331585e-06, | |
| "loss": 0.3985, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 1.1424, | |
| "grad_norm": 1.5149279832839966, | |
| "learning_rate": 4.631720275992148e-06, | |
| "loss": 0.3693, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 1.144, | |
| "grad_norm": 1.9649088382720947, | |
| "learning_rate": 4.617797022571977e-06, | |
| "loss": 0.3529, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 1.1456, | |
| "grad_norm": 1.4034981727600098, | |
| "learning_rate": 4.603876749647404e-06, | |
| "loss": 0.4014, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 1.1472, | |
| "grad_norm": 1.5938494205474854, | |
| "learning_rate": 4.589959565771505e-06, | |
| "loss": 0.3501, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 1.1488, | |
| "grad_norm": 1.5167361497879028, | |
| "learning_rate": 4.576045579473284e-06, | |
| "loss": 0.3622, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 1.1504, | |
| "grad_norm": 1.3950307369232178, | |
| "learning_rate": 4.562134899256797e-06, | |
| "loss": 0.3664, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 1.152, | |
| "grad_norm": 1.5592819452285767, | |
| "learning_rate": 4.548227633600322e-06, | |
| "loss": 0.4006, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 1.1536, | |
| "grad_norm": 1.5067250728607178, | |
| "learning_rate": 4.534323890955514e-06, | |
| "loss": 0.3676, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 1.1552, | |
| "grad_norm": 1.6111873388290405, | |
| "learning_rate": 4.520423779746547e-06, | |
| "loss": 0.4192, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 1.1568, | |
| "grad_norm": 1.3248369693756104, | |
| "learning_rate": 4.506527408369285e-06, | |
| "loss": 0.4106, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 1.1584, | |
| "grad_norm": 1.4361342191696167, | |
| "learning_rate": 4.492634885190417e-06, | |
| "loss": 0.3802, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 1.16, | |
| "grad_norm": 1.3219597339630127, | |
| "learning_rate": 4.478746318546636e-06, | |
| "loss": 0.4287, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 1.1616, | |
| "grad_norm": 1.4369667768478394, | |
| "learning_rate": 4.46486181674377e-06, | |
| "loss": 0.3821, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 1.1632, | |
| "grad_norm": 1.8185745477676392, | |
| "learning_rate": 4.450981488055957e-06, | |
| "loss": 0.3795, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 1.1648, | |
| "grad_norm": 1.7907580137252808, | |
| "learning_rate": 4.437105440724785e-06, | |
| "loss": 0.3967, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 1.1663999999999999, | |
| "grad_norm": 1.4447728395462036, | |
| "learning_rate": 4.423233782958459e-06, | |
| "loss": 0.3706, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 1.168, | |
| "grad_norm": 1.7561380863189697, | |
| "learning_rate": 4.409366622930955e-06, | |
| "loss": 0.3789, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 1.1696, | |
| "grad_norm": 1.9225605726242065, | |
| "learning_rate": 4.395504068781171e-06, | |
| "loss": 0.3958, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 1.1712, | |
| "grad_norm": 1.483598232269287, | |
| "learning_rate": 4.38164622861209e-06, | |
| "loss": 0.3897, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 1.1728, | |
| "grad_norm": 1.2621132135391235, | |
| "learning_rate": 4.36779321048993e-06, | |
| "loss": 0.3899, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 1.1743999999999999, | |
| "grad_norm": 1.4648138284683228, | |
| "learning_rate": 4.353945122443314e-06, | |
| "loss": 0.3961, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 1.176, | |
| "grad_norm": 1.5951130390167236, | |
| "learning_rate": 4.340102072462411e-06, | |
| "loss": 0.3834, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 1.1776, | |
| "grad_norm": 1.85948646068573, | |
| "learning_rate": 4.326264168498106e-06, | |
| "loss": 0.3873, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 1.1792, | |
| "grad_norm": 1.5866525173187256, | |
| "learning_rate": 4.312431518461154e-06, | |
| "loss": 0.4175, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 1.1808, | |
| "grad_norm": 1.5731794834136963, | |
| "learning_rate": 4.298604230221341e-06, | |
| "loss": 0.3492, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 1.1824, | |
| "grad_norm": 1.3170596361160278, | |
| "learning_rate": 4.284782411606635e-06, | |
| "loss": 0.3574, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 1.184, | |
| "grad_norm": 1.2995411157608032, | |
| "learning_rate": 4.270966170402354e-06, | |
| "loss": 0.4199, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 1.1856, | |
| "grad_norm": 1.9874039888381958, | |
| "learning_rate": 4.2571556143503275e-06, | |
| "loss": 0.3708, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 1.1872, | |
| "grad_norm": 1.4969028234481812, | |
| "learning_rate": 4.243350851148039e-06, | |
| "loss": 0.3772, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 1.1888, | |
| "grad_norm": 1.5935897827148438, | |
| "learning_rate": 4.229551988447809e-06, | |
| "loss": 0.3996, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 1.1904, | |
| "grad_norm": 1.4645673036575317, | |
| "learning_rate": 4.21575913385594e-06, | |
| "loss": 0.3947, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 1.192, | |
| "grad_norm": 1.8441481590270996, | |
| "learning_rate": 4.201972394931883e-06, | |
| "loss": 0.3492, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 1.1936, | |
| "grad_norm": 1.6657288074493408, | |
| "learning_rate": 4.188191879187395e-06, | |
| "loss": 0.3733, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 1.1952, | |
| "grad_norm": 1.7471777200698853, | |
| "learning_rate": 4.174417694085711e-06, | |
| "loss": 0.4058, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 1.1968, | |
| "grad_norm": 1.3249934911727905, | |
| "learning_rate": 4.1606499470406885e-06, | |
| "loss": 0.3961, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 1.1984, | |
| "grad_norm": 1.8512537479400635, | |
| "learning_rate": 4.146888745415988e-06, | |
| "loss": 0.3814, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 1.2, | |
| "grad_norm": 1.741398572921753, | |
| "learning_rate": 4.133134196524221e-06, | |
| "loss": 0.3704, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 1.2016, | |
| "grad_norm": 1.5396209955215454, | |
| "learning_rate": 4.119386407626126e-06, | |
| "loss": 0.4466, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 1.2032, | |
| "grad_norm": 1.612077236175537, | |
| "learning_rate": 4.105645485929721e-06, | |
| "loss": 0.3982, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 1.2048, | |
| "grad_norm": 1.978322982788086, | |
| "learning_rate": 4.091911538589474e-06, | |
| "loss": 0.3747, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 1.2064, | |
| "grad_norm": 1.7665278911590576, | |
| "learning_rate": 4.078184672705465e-06, | |
| "loss": 0.373, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 1.208, | |
| "grad_norm": 1.5830265283584595, | |
| "learning_rate": 4.064464995322549e-06, | |
| "loss": 0.394, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 1.2096, | |
| "grad_norm": 1.9034477472305298, | |
| "learning_rate": 4.0507526134295314e-06, | |
| "loss": 0.3909, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 1.2112, | |
| "grad_norm": 1.2925678491592407, | |
| "learning_rate": 4.037047633958317e-06, | |
| "loss": 0.3933, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 1.2128, | |
| "grad_norm": 1.6661574840545654, | |
| "learning_rate": 4.0233501637830905e-06, | |
| "loss": 0.3954, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 1.2144, | |
| "grad_norm": 1.2100130319595337, | |
| "learning_rate": 4.009660309719473e-06, | |
| "loss": 0.3583, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 1.216, | |
| "grad_norm": 1.683942198753357, | |
| "learning_rate": 3.9959781785237e-06, | |
| "loss": 0.4512, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 1.2176, | |
| "grad_norm": 1.2914611101150513, | |
| "learning_rate": 3.982303876891778e-06, | |
| "loss": 0.3922, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 1.2192, | |
| "grad_norm": 1.4684218168258667, | |
| "learning_rate": 3.968637511458657e-06, | |
| "loss": 0.4232, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 1.2208, | |
| "grad_norm": 1.8208807706832886, | |
| "learning_rate": 3.954979188797402e-06, | |
| "loss": 0.4113, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 1.2224, | |
| "grad_norm": 1.4767571687698364, | |
| "learning_rate": 3.9413290154183536e-06, | |
| "loss": 0.3681, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 1.224, | |
| "grad_norm": 1.5882097482681274, | |
| "learning_rate": 3.927687097768309e-06, | |
| "loss": 0.3759, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 1.2256, | |
| "grad_norm": 1.4595040082931519, | |
| "learning_rate": 3.91405354222968e-06, | |
| "loss": 0.3872, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 1.2272, | |
| "grad_norm": 2.047941207885742, | |
| "learning_rate": 3.900428455119674e-06, | |
| "loss": 0.4178, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 1.2288000000000001, | |
| "grad_norm": 1.8350028991699219, | |
| "learning_rate": 3.886811942689453e-06, | |
| "loss": 0.4165, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 1.2304, | |
| "grad_norm": 1.148192048072815, | |
| "learning_rate": 3.873204111123321e-06, | |
| "loss": 0.3798, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 1.232, | |
| "grad_norm": 1.4587918519973755, | |
| "learning_rate": 3.859605066537879e-06, | |
| "loss": 0.3696, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 1.2336, | |
| "grad_norm": 2.493159055709839, | |
| "learning_rate": 3.846014914981209e-06, | |
| "loss": 0.4163, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 1.2352, | |
| "grad_norm": 1.6643836498260498, | |
| "learning_rate": 3.832433762432044e-06, | |
| "loss": 0.3698, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 1.2368000000000001, | |
| "grad_norm": 1.5414241552352905, | |
| "learning_rate": 3.818861714798939e-06, | |
| "loss": 0.398, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 1.2384, | |
| "grad_norm": 1.6924750804901123, | |
| "learning_rate": 3.8052988779194478e-06, | |
| "loss": 0.3765, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 1.24, | |
| "grad_norm": 1.922426700592041, | |
| "learning_rate": 3.7917453575592956e-06, | |
| "loss": 0.3839, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 1.2416, | |
| "grad_norm": 2.092884063720703, | |
| "learning_rate": 3.77820125941156e-06, | |
| "loss": 0.4134, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 1.2432, | |
| "grad_norm": 1.6445622444152832, | |
| "learning_rate": 3.764666689095835e-06, | |
| "loss": 0.4167, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 1.2448, | |
| "grad_norm": 1.703126072883606, | |
| "learning_rate": 3.751141752157423e-06, | |
| "loss": 0.4038, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 1.2464, | |
| "grad_norm": 2.015972852706909, | |
| "learning_rate": 3.737626554066495e-06, | |
| "loss": 0.4068, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 1.248, | |
| "grad_norm": 1.5350987911224365, | |
| "learning_rate": 3.7241212002172846e-06, | |
| "loss": 0.3884, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 1.2496, | |
| "grad_norm": 1.4118163585662842, | |
| "learning_rate": 3.710625795927249e-06, | |
| "loss": 0.3932, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 1.2511999999999999, | |
| "grad_norm": 1.4548838138580322, | |
| "learning_rate": 3.6971404464362657e-06, | |
| "loss": 0.3869, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 1.2528000000000001, | |
| "grad_norm": 1.5402504205703735, | |
| "learning_rate": 3.6836652569057994e-06, | |
| "loss": 0.3792, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 1.2544, | |
| "grad_norm": 1.3581764698028564, | |
| "learning_rate": 3.6702003324180823e-06, | |
| "loss": 0.3364, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 1.256, | |
| "grad_norm": 1.3557631969451904, | |
| "learning_rate": 3.656745777975303e-06, | |
| "loss": 0.4116, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 1.2576, | |
| "grad_norm": 1.7553138732910156, | |
| "learning_rate": 3.6433016984987774e-06, | |
| "loss": 0.3799, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 1.2591999999999999, | |
| "grad_norm": 1.6802467107772827, | |
| "learning_rate": 3.6298681988281405e-06, | |
| "loss": 0.3984, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 1.2608, | |
| "grad_norm": 1.4376049041748047, | |
| "learning_rate": 3.616445383720517e-06, | |
| "loss": 0.4052, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 1.2624, | |
| "grad_norm": 1.6305116415023804, | |
| "learning_rate": 3.6030333578497213e-06, | |
| "loss": 0.3935, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 1.264, | |
| "grad_norm": 1.5937840938568115, | |
| "learning_rate": 3.589632225805419e-06, | |
| "loss": 0.3497, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 1.2656, | |
| "grad_norm": 1.9702191352844238, | |
| "learning_rate": 3.576242092092334e-06, | |
| "loss": 0.3834, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 1.2671999999999999, | |
| "grad_norm": 1.6501344442367554, | |
| "learning_rate": 3.562863061129419e-06, | |
| "loss": 0.3627, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 1.2688, | |
| "grad_norm": 1.564285397529602, | |
| "learning_rate": 3.549495237249042e-06, | |
| "loss": 0.3889, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 1.2704, | |
| "grad_norm": 1.3608351945877075, | |
| "learning_rate": 3.536138724696182e-06, | |
| "loss": 0.4151, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 1.272, | |
| "grad_norm": 1.9222168922424316, | |
| "learning_rate": 3.5227936276276055e-06, | |
| "loss": 0.4011, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 1.2736, | |
| "grad_norm": 1.614746332168579, | |
| "learning_rate": 3.509460050111061e-06, | |
| "loss": 0.373, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 1.2752, | |
| "grad_norm": 1.7889058589935303, | |
| "learning_rate": 3.4961380961244605e-06, | |
| "loss": 0.3885, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 1.2768, | |
| "grad_norm": 1.7658660411834717, | |
| "learning_rate": 3.4828278695550845e-06, | |
| "loss": 0.4161, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 1.2784, | |
| "grad_norm": 1.7985132932662964, | |
| "learning_rate": 3.4695294741987474e-06, | |
| "loss": 0.3882, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 1.28, | |
| "grad_norm": 1.746675968170166, | |
| "learning_rate": 3.4562430137590107e-06, | |
| "loss": 0.4179, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 1.2816, | |
| "grad_norm": 1.769290566444397, | |
| "learning_rate": 3.442968591846359e-06, | |
| "loss": 0.3897, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 1.2832, | |
| "grad_norm": 1.8828134536743164, | |
| "learning_rate": 3.4297063119774037e-06, | |
| "loss": 0.3949, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 1.2848, | |
| "grad_norm": 1.723960041999817, | |
| "learning_rate": 3.416456277574068e-06, | |
| "loss": 0.3852, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 1.2864, | |
| "grad_norm": 1.3766586780548096, | |
| "learning_rate": 3.4032185919627784e-06, | |
| "loss": 0.3709, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 1.288, | |
| "grad_norm": 1.7958343029022217, | |
| "learning_rate": 3.38999335837367e-06, | |
| "loss": 0.4056, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 1.2896, | |
| "grad_norm": 1.3539986610412598, | |
| "learning_rate": 3.376780679939767e-06, | |
| "loss": 0.4276, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 1.2912, | |
| "grad_norm": 1.7370043992996216, | |
| "learning_rate": 3.363580659696194e-06, | |
| "loss": 0.4073, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 1.2928, | |
| "grad_norm": 1.4151561260223389, | |
| "learning_rate": 3.350393400579358e-06, | |
| "loss": 0.4318, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 1.2944, | |
| "grad_norm": 1.4391050338745117, | |
| "learning_rate": 3.3372190054261565e-06, | |
| "loss": 0.3826, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 1.296, | |
| "grad_norm": 1.6209253072738647, | |
| "learning_rate": 3.3240575769731662e-06, | |
| "loss": 0.4122, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 1.2976, | |
| "grad_norm": 1.3858795166015625, | |
| "learning_rate": 3.3109092178558546e-06, | |
| "loss": 0.4157, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 1.2992, | |
| "grad_norm": 1.376095175743103, | |
| "learning_rate": 3.297774030607763e-06, | |
| "loss": 0.4067, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 1.3008, | |
| "grad_norm": 1.2436944246292114, | |
| "learning_rate": 3.2846521176597217e-06, | |
| "loss": 0.3768, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 1.3024, | |
| "grad_norm": 1.7928731441497803, | |
| "learning_rate": 3.271543581339047e-06, | |
| "loss": 0.4157, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 1.304, | |
| "grad_norm": 1.6191116571426392, | |
| "learning_rate": 3.2584485238687318e-06, | |
| "loss": 0.3609, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 1.3056, | |
| "grad_norm": 1.6471881866455078, | |
| "learning_rate": 3.245367047366671e-06, | |
| "loss": 0.3756, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 1.3072, | |
| "grad_norm": 1.488052248954773, | |
| "learning_rate": 3.2322992538448418e-06, | |
| "loss": 0.3511, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 1.3088, | |
| "grad_norm": 1.9696465730667114, | |
| "learning_rate": 3.2192452452085265e-06, | |
| "loss": 0.4224, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 1.3104, | |
| "grad_norm": 1.5203955173492432, | |
| "learning_rate": 3.2062051232555024e-06, | |
| "loss": 0.3366, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 1.312, | |
| "grad_norm": 1.840429425239563, | |
| "learning_rate": 3.1931789896752654e-06, | |
| "loss": 0.4006, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 1.3136, | |
| "grad_norm": 2.4381566047668457, | |
| "learning_rate": 3.1801669460482176e-06, | |
| "loss": 0.4083, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 1.3152, | |
| "grad_norm": 2.0025670528411865, | |
| "learning_rate": 3.1671690938448895e-06, | |
| "loss": 0.3956, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 1.3168, | |
| "grad_norm": 1.1892062425613403, | |
| "learning_rate": 3.154185534425147e-06, | |
| "loss": 0.397, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 1.3184, | |
| "grad_norm": 1.4366689920425415, | |
| "learning_rate": 3.141216369037391e-06, | |
| "loss": 0.3643, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 1.32, | |
| "grad_norm": 1.8570374250411987, | |
| "learning_rate": 3.1282616988177806e-06, | |
| "loss": 0.4241, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 1.3216, | |
| "grad_norm": 1.8865070343017578, | |
| "learning_rate": 3.115321624789433e-06, | |
| "loss": 0.399, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 1.3232, | |
| "grad_norm": 1.5761024951934814, | |
| "learning_rate": 3.102396247861651e-06, | |
| "loss": 0.3712, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 1.3248, | |
| "grad_norm": 1.6456334590911865, | |
| "learning_rate": 3.089485668829113e-06, | |
| "loss": 0.3637, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 1.3264, | |
| "grad_norm": 1.2593110799789429, | |
| "learning_rate": 3.0765899883711148e-06, | |
| "loss": 0.3902, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 1.328, | |
| "grad_norm": 1.481000542640686, | |
| "learning_rate": 3.063709307050757e-06, | |
| "loss": 0.3586, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 1.3296000000000001, | |
| "grad_norm": 1.4595977067947388, | |
| "learning_rate": 3.0508437253141855e-06, | |
| "loss": 0.3594, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 1.3312, | |
| "grad_norm": 1.5355987548828125, | |
| "learning_rate": 3.0379933434897846e-06, | |
| "loss": 0.384, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 1.3328, | |
| "grad_norm": 2.011629581451416, | |
| "learning_rate": 3.0251582617874187e-06, | |
| "loss": 0.3993, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 1.3344, | |
| "grad_norm": 1.8381239175796509, | |
| "learning_rate": 3.0123385802976323e-06, | |
| "loss": 0.3728, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 1.336, | |
| "grad_norm": 1.9025415182113647, | |
| "learning_rate": 2.9995343989908743e-06, | |
| "loss": 0.3801, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 1.3376000000000001, | |
| "grad_norm": 1.5767295360565186, | |
| "learning_rate": 2.986745817716725e-06, | |
| "loss": 0.3896, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 1.3392, | |
| "grad_norm": 1.532058596611023, | |
| "learning_rate": 2.97397293620311e-06, | |
| "loss": 0.3632, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 1.3408, | |
| "grad_norm": 1.7929993867874146, | |
| "learning_rate": 2.9612158540555245e-06, | |
| "loss": 0.4157, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 1.3424, | |
| "grad_norm": 1.456510066986084, | |
| "learning_rate": 2.9484746707562573e-06, | |
| "loss": 0.3536, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 1.3439999999999999, | |
| "grad_norm": 1.5640872716903687, | |
| "learning_rate": 2.935749485663616e-06, | |
| "loss": 0.3516, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 1.3456000000000001, | |
| "grad_norm": 1.582572340965271, | |
| "learning_rate": 2.9230403980111482e-06, | |
| "loss": 0.3615, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 1.3472, | |
| "grad_norm": 1.4674878120422363, | |
| "learning_rate": 2.9103475069068763e-06, | |
| "loss": 0.4103, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 1.3488, | |
| "grad_norm": 1.8493053913116455, | |
| "learning_rate": 2.8976709113325107e-06, | |
| "loss": 0.4272, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 1.3504, | |
| "grad_norm": 1.8366992473602295, | |
| "learning_rate": 2.8850107101426916e-06, | |
| "loss": 0.4172, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 1.3519999999999999, | |
| "grad_norm": 1.6848294734954834, | |
| "learning_rate": 2.8723670020642137e-06, | |
| "loss": 0.3844, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 1.3536000000000001, | |
| "grad_norm": 1.5842174291610718, | |
| "learning_rate": 2.8597398856952473e-06, | |
| "loss": 0.3903, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 1.3552, | |
| "grad_norm": 1.3894240856170654, | |
| "learning_rate": 2.8471294595045886e-06, | |
| "loss": 0.4115, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 1.3568, | |
| "grad_norm": 1.5545260906219482, | |
| "learning_rate": 2.83453582183087e-06, | |
| "loss": 0.4062, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 1.3584, | |
| "grad_norm": 1.8124723434448242, | |
| "learning_rate": 2.821959070881809e-06, | |
| "loss": 0.4054, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 1.3599999999999999, | |
| "grad_norm": 1.9200547933578491, | |
| "learning_rate": 2.8093993047334333e-06, | |
| "loss": 0.3689, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 1.3616, | |
| "grad_norm": 1.4908267259597778, | |
| "learning_rate": 2.7968566213293276e-06, | |
| "loss": 0.358, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 1.3632, | |
| "grad_norm": 1.7908552885055542, | |
| "learning_rate": 2.784331118479851e-06, | |
| "loss": 0.4138, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 1.3648, | |
| "grad_norm": 1.5923010110855103, | |
| "learning_rate": 2.7718228938613955e-06, | |
| "loss": 0.3836, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 1.3664, | |
| "grad_norm": 1.8634357452392578, | |
| "learning_rate": 2.759332045015608e-06, | |
| "loss": 0.3334, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 1.3679999999999999, | |
| "grad_norm": 1.697973608970642, | |
| "learning_rate": 2.746858669348634e-06, | |
| "loss": 0.4082, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 1.3696, | |
| "grad_norm": 1.489332675933838, | |
| "learning_rate": 2.7344028641303667e-06, | |
| "loss": 0.3809, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 1.3712, | |
| "grad_norm": 1.5490901470184326, | |
| "learning_rate": 2.7219647264936733e-06, | |
| "loss": 0.3441, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 1.3728, | |
| "grad_norm": 1.6636728048324585, | |
| "learning_rate": 2.7095443534336545e-06, | |
| "loss": 0.3795, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 1.3744, | |
| "grad_norm": 1.7879304885864258, | |
| "learning_rate": 2.6971418418068696e-06, | |
| "loss": 0.4198, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 1.376, | |
| "grad_norm": 1.7500596046447754, | |
| "learning_rate": 2.6847572883305993e-06, | |
| "loss": 0.3944, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 1.3776, | |
| "grad_norm": 1.3904556035995483, | |
| "learning_rate": 2.672390789582079e-06, | |
| "loss": 0.3777, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 1.3792, | |
| "grad_norm": 1.8996713161468506, | |
| "learning_rate": 2.660042441997748e-06, | |
| "loss": 0.3758, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 1.3808, | |
| "grad_norm": 1.7194160223007202, | |
| "learning_rate": 2.647712341872501e-06, | |
| "loss": 0.4321, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 1.3824, | |
| "grad_norm": 1.6117509603500366, | |
| "learning_rate": 2.635400585358937e-06, | |
| "loss": 0.3732, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 1.384, | |
| "grad_norm": 1.4354063272476196, | |
| "learning_rate": 2.623107268466608e-06, | |
| "loss": 0.3752, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 1.3856, | |
| "grad_norm": 1.9995311498641968, | |
| "learning_rate": 2.6108324870612674e-06, | |
| "loss": 0.381, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 1.3872, | |
| "grad_norm": 1.6283619403839111, | |
| "learning_rate": 2.5985763368641253e-06, | |
| "loss": 0.4, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 1.3888, | |
| "grad_norm": 1.734034776687622, | |
| "learning_rate": 2.5863389134511024e-06, | |
| "loss": 0.3658, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 1.3904, | |
| "grad_norm": 1.5603657960891724, | |
| "learning_rate": 2.5741203122520876e-06, | |
| "loss": 0.3482, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 1.392, | |
| "grad_norm": 1.770123839378357, | |
| "learning_rate": 2.561920628550184e-06, | |
| "loss": 0.3679, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 1.3936, | |
| "grad_norm": 1.7462615966796875, | |
| "learning_rate": 2.549739957480979e-06, | |
| "loss": 0.4488, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 1.3952, | |
| "grad_norm": 1.3405978679656982, | |
| "learning_rate": 2.53757839403179e-06, | |
| "loss": 0.3691, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 1.3968, | |
| "grad_norm": 1.5611436367034912, | |
| "learning_rate": 2.5254360330409343e-06, | |
| "loss": 0.4053, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 1.3984, | |
| "grad_norm": 1.5699836015701294, | |
| "learning_rate": 2.5133129691969806e-06, | |
| "loss": 0.3623, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 1.4, | |
| "grad_norm": 1.4957606792449951, | |
| "learning_rate": 2.501209297038014e-06, | |
| "loss": 0.4139, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 1.4016, | |
| "grad_norm": 1.471581220626831, | |
| "learning_rate": 2.4891251109509053e-06, | |
| "loss": 0.3543, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 1.4032, | |
| "grad_norm": 1.7124475240707397, | |
| "learning_rate": 2.477060505170561e-06, | |
| "loss": 0.3874, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 1.4048, | |
| "grad_norm": 1.382944107055664, | |
| "learning_rate": 2.465015573779205e-06, | |
| "loss": 0.3609, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 1.4064, | |
| "grad_norm": 2.0033528804779053, | |
| "learning_rate": 2.452990410705629e-06, | |
| "loss": 0.3613, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 1.408, | |
| "grad_norm": 1.6155847311019897, | |
| "learning_rate": 2.4409851097244708e-06, | |
| "loss": 0.3761, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 1.4096, | |
| "grad_norm": 1.731706142425537, | |
| "learning_rate": 2.4289997644554775e-06, | |
| "loss": 0.3562, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 1.4112, | |
| "grad_norm": 1.7755978107452393, | |
| "learning_rate": 2.417034468362782e-06, | |
| "loss": 0.4038, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 1.4128, | |
| "grad_norm": 1.3863284587860107, | |
| "learning_rate": 2.4050893147541643e-06, | |
| "loss": 0.3602, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 1.4144, | |
| "grad_norm": 1.659938097000122, | |
| "learning_rate": 2.393164396780332e-06, | |
| "loss": 0.4019, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 1.416, | |
| "grad_norm": 1.3707387447357178, | |
| "learning_rate": 2.381259807434194e-06, | |
| "loss": 0.3726, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 1.4176, | |
| "grad_norm": 1.4226531982421875, | |
| "learning_rate": 2.369375639550127e-06, | |
| "loss": 0.3748, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 1.4192, | |
| "grad_norm": 1.5829614400863647, | |
| "learning_rate": 2.3575119858032604e-06, | |
| "loss": 0.4063, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 1.4208, | |
| "grad_norm": 1.7366124391555786, | |
| "learning_rate": 2.345668938708746e-06, | |
| "loss": 0.3864, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 1.4224, | |
| "grad_norm": 1.6385079622268677, | |
| "learning_rate": 2.333846590621049e-06, | |
| "loss": 0.407, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 1.424, | |
| "grad_norm": 1.4299626350402832, | |
| "learning_rate": 2.3220450337332097e-06, | |
| "loss": 0.3604, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 1.4256, | |
| "grad_norm": 1.7684646844863892, | |
| "learning_rate": 2.3102643600761445e-06, | |
| "loss": 0.3902, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 1.4272, | |
| "grad_norm": 1.393120527267456, | |
| "learning_rate": 2.2985046615179098e-06, | |
| "loss": 0.3873, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 1.4288, | |
| "grad_norm": 1.520400881767273, | |
| "learning_rate": 2.2867660297629977e-06, | |
| "loss": 0.4232, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 1.4304000000000001, | |
| "grad_norm": 1.9769854545593262, | |
| "learning_rate": 2.2750485563516154e-06, | |
| "loss": 0.4252, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 1.432, | |
| "grad_norm": 1.915168285369873, | |
| "learning_rate": 2.263352332658976e-06, | |
| "loss": 0.3505, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 1.4336, | |
| "grad_norm": 1.3965888023376465, | |
| "learning_rate": 2.251677449894583e-06, | |
| "loss": 0.3546, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 1.4352, | |
| "grad_norm": 1.7047879695892334, | |
| "learning_rate": 2.2400239991015144e-06, | |
| "loss": 0.3979, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 1.4368, | |
| "grad_norm": 1.9038892984390259, | |
| "learning_rate": 2.2283920711557226e-06, | |
| "loss": 0.3405, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 1.4384000000000001, | |
| "grad_norm": 1.531741976737976, | |
| "learning_rate": 2.2167817567653176e-06, | |
| "loss": 0.3735, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 1.44, | |
| "grad_norm": 1.4564132690429688, | |
| "learning_rate": 2.2051931464698636e-06, | |
| "loss": 0.3903, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 1.4416, | |
| "grad_norm": 1.9366191625595093, | |
| "learning_rate": 2.1936263306396688e-06, | |
| "loss": 0.362, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 1.4432, | |
| "grad_norm": 1.9327373504638672, | |
| "learning_rate": 2.1820813994750904e-06, | |
| "loss": 0.3991, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 1.4447999999999999, | |
| "grad_norm": 1.4752402305603027, | |
| "learning_rate": 2.170558443005818e-06, | |
| "loss": 0.3725, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 1.4464000000000001, | |
| "grad_norm": 1.606317400932312, | |
| "learning_rate": 2.159057551090184e-06, | |
| "loss": 0.3496, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 1.448, | |
| "grad_norm": 1.4181339740753174, | |
| "learning_rate": 2.1475788134144516e-06, | |
| "loss": 0.3824, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 1.4496, | |
| "grad_norm": 1.7784645557403564, | |
| "learning_rate": 2.1361223194921214e-06, | |
| "loss": 0.3769, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 1.4512, | |
| "grad_norm": 1.7761059999465942, | |
| "learning_rate": 2.1246881586632384e-06, | |
| "loss": 0.385, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 1.4527999999999999, | |
| "grad_norm": 1.8043594360351562, | |
| "learning_rate": 2.113276420093681e-06, | |
| "loss": 0.336, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 1.4544000000000001, | |
| "grad_norm": 1.6478018760681152, | |
| "learning_rate": 2.1018871927744844e-06, | |
| "loss": 0.3611, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 1.456, | |
| "grad_norm": 1.8899405002593994, | |
| "learning_rate": 2.0905205655211257e-06, | |
| "loss": 0.4117, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 1.4576, | |
| "grad_norm": 1.8601652383804321, | |
| "learning_rate": 2.079176626972852e-06, | |
| "loss": 0.3536, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 1.4592, | |
| "grad_norm": 1.590239405632019, | |
| "learning_rate": 2.0678554655919725e-06, | |
| "loss": 0.3854, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 1.4607999999999999, | |
| "grad_norm": 1.4720289707183838, | |
| "learning_rate": 2.056557169663179e-06, | |
| "loss": 0.4058, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 1.4624, | |
| "grad_norm": 1.6574110984802246, | |
| "learning_rate": 2.0452818272928493e-06, | |
| "loss": 0.3804, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 1.464, | |
| "grad_norm": 1.6768255233764648, | |
| "learning_rate": 2.0340295264083716e-06, | |
| "loss": 0.3817, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 1.4656, | |
| "grad_norm": 1.6717270612716675, | |
| "learning_rate": 2.0228003547574488e-06, | |
| "loss": 0.4014, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 1.4672, | |
| "grad_norm": 1.4249584674835205, | |
| "learning_rate": 2.0115943999074167e-06, | |
| "loss": 0.3541, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 1.4687999999999999, | |
| "grad_norm": 1.7832509279251099, | |
| "learning_rate": 2.0004117492445614e-06, | |
| "loss": 0.399, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 1.4704, | |
| "grad_norm": 1.6520204544067383, | |
| "learning_rate": 1.989252489973438e-06, | |
| "loss": 0.3961, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 1.472, | |
| "grad_norm": 1.6940590143203735, | |
| "learning_rate": 1.9781167091161944e-06, | |
| "loss": 0.3484, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 1.4736, | |
| "grad_norm": 2.0652220249176025, | |
| "learning_rate": 1.967004493511884e-06, | |
| "loss": 0.4373, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 1.4752, | |
| "grad_norm": 1.8518260717391968, | |
| "learning_rate": 1.9559159298158e-06, | |
| "loss": 0.3889, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 1.4768, | |
| "grad_norm": 1.7854114770889282, | |
| "learning_rate": 1.9448511044987862e-06, | |
| "loss": 0.3969, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 1.4784, | |
| "grad_norm": 1.561976432800293, | |
| "learning_rate": 1.933810103846575e-06, | |
| "loss": 0.4055, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 1.48, | |
| "grad_norm": 1.4357398748397827, | |
| "learning_rate": 1.9227930139591077e-06, | |
| "loss": 0.36, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 1.4816, | |
| "grad_norm": 1.593051552772522, | |
| "learning_rate": 1.911799920749861e-06, | |
| "loss": 0.351, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 1.4832, | |
| "grad_norm": 1.5628341436386108, | |
| "learning_rate": 1.900830909945189e-06, | |
| "loss": 0.4386, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 1.4848, | |
| "grad_norm": 1.9182257652282715, | |
| "learning_rate": 1.8898860670836367e-06, | |
| "loss": 0.3811, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 1.4864, | |
| "grad_norm": 1.701400876045227, | |
| "learning_rate": 1.878965477515291e-06, | |
| "loss": 0.3846, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 1.488, | |
| "grad_norm": 1.534961223602295, | |
| "learning_rate": 1.8680692264011014e-06, | |
| "loss": 0.3817, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 1.4896, | |
| "grad_norm": 1.7734757661819458, | |
| "learning_rate": 1.8571973987122233e-06, | |
| "loss": 0.3586, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 1.4912, | |
| "grad_norm": 1.6101787090301514, | |
| "learning_rate": 1.846350079229351e-06, | |
| "loss": 0.4278, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 1.4928, | |
| "grad_norm": 1.629698395729065, | |
| "learning_rate": 1.8355273525420642e-06, | |
| "loss": 0.4075, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 1.4944, | |
| "grad_norm": 1.737805962562561, | |
| "learning_rate": 1.8247293030481568e-06, | |
| "loss": 0.3809, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 1.496, | |
| "grad_norm": 1.8816227912902832, | |
| "learning_rate": 1.81395601495299e-06, | |
| "loss": 0.3975, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 1.4976, | |
| "grad_norm": 1.781559705734253, | |
| "learning_rate": 1.803207572268826e-06, | |
| "loss": 0.3993, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 1.4992, | |
| "grad_norm": 1.4990744590759277, | |
| "learning_rate": 1.7924840588141829e-06, | |
| "loss": 0.3388, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 1.5008, | |
| "grad_norm": 1.7740086317062378, | |
| "learning_rate": 1.781785558213172e-06, | |
| "loss": 0.3967, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 1.5024, | |
| "grad_norm": 1.514217495918274, | |
| "learning_rate": 1.7711121538948473e-06, | |
| "loss": 0.3962, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 1.504, | |
| "grad_norm": 1.685247540473938, | |
| "learning_rate": 1.760463929092564e-06, | |
| "loss": 0.387, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 1.5056, | |
| "grad_norm": 1.7199534177780151, | |
| "learning_rate": 1.7498409668433135e-06, | |
| "loss": 0.4118, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 1.5072, | |
| "grad_norm": 1.865826964378357, | |
| "learning_rate": 1.7392433499870941e-06, | |
| "loss": 0.3686, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 1.5088, | |
| "grad_norm": 1.8179106712341309, | |
| "learning_rate": 1.7286711611662488e-06, | |
| "loss": 0.4103, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 1.5104, | |
| "grad_norm": 1.464895486831665, | |
| "learning_rate": 1.7181244828248294e-06, | |
| "loss": 0.3967, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 1.512, | |
| "grad_norm": 1.4883111715316772, | |
| "learning_rate": 1.7076033972079503e-06, | |
| "loss": 0.3458, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 1.5135999999999998, | |
| "grad_norm": 1.9913544654846191, | |
| "learning_rate": 1.6971079863611534e-06, | |
| "loss": 0.4022, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 1.5152, | |
| "grad_norm": 2.0531091690063477, | |
| "learning_rate": 1.6866383321297614e-06, | |
| "loss": 0.3882, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 1.5168, | |
| "grad_norm": 1.244572639465332, | |
| "learning_rate": 1.6761945161582382e-06, | |
| "loss": 0.3862, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 1.5184, | |
| "grad_norm": 1.6240170001983643, | |
| "learning_rate": 1.665776619889562e-06, | |
| "loss": 0.3529, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 1.52, | |
| "grad_norm": 1.4777076244354248, | |
| "learning_rate": 1.6553847245645787e-06, | |
| "loss": 0.3739, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 1.5215999999999998, | |
| "grad_norm": 1.4702836275100708, | |
| "learning_rate": 1.645018911221376e-06, | |
| "loss": 0.3271, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 1.5232, | |
| "grad_norm": 1.6094434261322021, | |
| "learning_rate": 1.6346792606946466e-06, | |
| "loss": 0.3727, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 1.5248, | |
| "grad_norm": 2.058452844619751, | |
| "learning_rate": 1.6243658536150657e-06, | |
| "loss": 0.4234, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 1.5264, | |
| "grad_norm": 1.6575367450714111, | |
| "learning_rate": 1.6140787704086502e-06, | |
| "loss": 0.3904, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 1.528, | |
| "grad_norm": 1.5286281108856201, | |
| "learning_rate": 1.6038180912961455e-06, | |
| "loss": 0.3878, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 1.5295999999999998, | |
| "grad_norm": 1.4369785785675049, | |
| "learning_rate": 1.5935838962923849e-06, | |
| "loss": 0.429, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 1.5312000000000001, | |
| "grad_norm": 1.5446891784667969, | |
| "learning_rate": 1.5833762652056773e-06, | |
| "loss": 0.3974, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 1.5328, | |
| "grad_norm": 1.51997709274292, | |
| "learning_rate": 1.5731952776371828e-06, | |
| "loss": 0.3821, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 1.5344, | |
| "grad_norm": 1.4272058010101318, | |
| "learning_rate": 1.5630410129802837e-06, | |
| "loss": 0.3661, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 1.536, | |
| "grad_norm": 1.5782861709594727, | |
| "learning_rate": 1.55291355041998e-06, | |
| "loss": 0.4284, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 1.5375999999999999, | |
| "grad_norm": 1.3205955028533936, | |
| "learning_rate": 1.5428129689322552e-06, | |
| "loss": 0.3933, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 1.5392000000000001, | |
| "grad_norm": 1.5751023292541504, | |
| "learning_rate": 1.5327393472834772e-06, | |
| "loss": 0.3658, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 1.5408, | |
| "grad_norm": 1.3711270093917847, | |
| "learning_rate": 1.5226927640297663e-06, | |
| "loss": 0.3506, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 1.5424, | |
| "grad_norm": 1.6612223386764526, | |
| "learning_rate": 1.5126732975164e-06, | |
| "loss": 0.3912, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 1.544, | |
| "grad_norm": 1.7373991012573242, | |
| "learning_rate": 1.5026810258771885e-06, | |
| "loss": 0.4037, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 1.5455999999999999, | |
| "grad_norm": 1.6066126823425293, | |
| "learning_rate": 1.492716027033876e-06, | |
| "loss": 0.3973, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 1.5472000000000001, | |
| "grad_norm": 1.4465765953063965, | |
| "learning_rate": 1.4827783786955224e-06, | |
| "loss": 0.3703, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 1.5488, | |
| "grad_norm": 1.5032522678375244, | |
| "learning_rate": 1.4728681583579091e-06, | |
| "loss": 0.3711, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 1.5504, | |
| "grad_norm": 1.4513039588928223, | |
| "learning_rate": 1.4629854433029234e-06, | |
| "loss": 0.3861, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 1.552, | |
| "grad_norm": 1.62880277633667, | |
| "learning_rate": 1.4531303105979605e-06, | |
| "loss": 0.4177, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 1.5535999999999999, | |
| "grad_norm": 1.6827924251556396, | |
| "learning_rate": 1.4433028370953279e-06, | |
| "loss": 0.3677, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 1.5552000000000001, | |
| "grad_norm": 1.556490421295166, | |
| "learning_rate": 1.4335030994316357e-06, | |
| "loss": 0.3774, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 1.5568, | |
| "grad_norm": 1.933334231376648, | |
| "learning_rate": 1.4237311740272097e-06, | |
| "loss": 0.3974, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 1.5584, | |
| "grad_norm": 1.4975850582122803, | |
| "learning_rate": 1.413987137085484e-06, | |
| "loss": 0.384, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 1.56, | |
| "grad_norm": 1.7752515077590942, | |
| "learning_rate": 1.4042710645924207e-06, | |
| "loss": 0.3538, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 1.5615999999999999, | |
| "grad_norm": 1.4480066299438477, | |
| "learning_rate": 1.3945830323158982e-06, | |
| "loss": 0.3511, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 1.5632000000000001, | |
| "grad_norm": 1.6013789176940918, | |
| "learning_rate": 1.3849231158051418e-06, | |
| "loss": 0.3802, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 1.5648, | |
| "grad_norm": 1.5784839391708374, | |
| "learning_rate": 1.3752913903901227e-06, | |
| "loss": 0.3459, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 1.5664, | |
| "grad_norm": 1.4082424640655518, | |
| "learning_rate": 1.3656879311809674e-06, | |
| "loss": 0.3836, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 1.568, | |
| "grad_norm": 1.5370289087295532, | |
| "learning_rate": 1.3561128130673823e-06, | |
| "loss": 0.3696, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 1.5695999999999999, | |
| "grad_norm": 1.4042068719863892, | |
| "learning_rate": 1.346566110718061e-06, | |
| "loss": 0.388, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 1.5712000000000002, | |
| "grad_norm": 2.047809600830078, | |
| "learning_rate": 1.3370478985801062e-06, | |
| "loss": 0.4054, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 1.5728, | |
| "grad_norm": 1.5325422286987305, | |
| "learning_rate": 1.3275582508784462e-06, | |
| "loss": 0.3984, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 1.5744, | |
| "grad_norm": 1.8603466749191284, | |
| "learning_rate": 1.3180972416152637e-06, | |
| "loss": 0.3489, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 1.576, | |
| "grad_norm": 2.124300241470337, | |
| "learning_rate": 1.3086649445694056e-06, | |
| "loss": 0.4161, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 1.5776, | |
| "grad_norm": 1.6698065996170044, | |
| "learning_rate": 1.2992614332958226e-06, | |
| "loss": 0.3939, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 1.5792000000000002, | |
| "grad_norm": 1.699593186378479, | |
| "learning_rate": 1.2898867811249832e-06, | |
| "loss": 0.3841, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 1.5808, | |
| "grad_norm": 1.5307472944259644, | |
| "learning_rate": 1.280541061162306e-06, | |
| "loss": 0.3339, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 1.5824, | |
| "grad_norm": 1.9167606830596924, | |
| "learning_rate": 1.2712243462875967e-06, | |
| "loss": 0.384, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 1.584, | |
| "grad_norm": 1.8776471614837646, | |
| "learning_rate": 1.2619367091544654e-06, | |
| "loss": 0.3986, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 1.5856, | |
| "grad_norm": 1.6647443771362305, | |
| "learning_rate": 1.2526782221897755e-06, | |
| "loss": 0.3765, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 1.5872000000000002, | |
| "grad_norm": 1.3155474662780762, | |
| "learning_rate": 1.2434489575930652e-06, | |
| "loss": 0.3671, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 1.5888, | |
| "grad_norm": 1.5139459371566772, | |
| "learning_rate": 1.234248987335997e-06, | |
| "loss": 0.427, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 1.5904, | |
| "grad_norm": 1.8827879428863525, | |
| "learning_rate": 1.2250783831617852e-06, | |
| "loss": 0.375, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 1.592, | |
| "grad_norm": 1.7208526134490967, | |
| "learning_rate": 1.215937216584644e-06, | |
| "loss": 0.3652, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 1.5936, | |
| "grad_norm": 1.4981166124343872, | |
| "learning_rate": 1.206825558889224e-06, | |
| "loss": 0.4189, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 1.5952, | |
| "grad_norm": 1.5617376565933228, | |
| "learning_rate": 1.1977434811300664e-06, | |
| "loss": 0.3264, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 1.5968, | |
| "grad_norm": 1.882985234260559, | |
| "learning_rate": 1.1886910541310342e-06, | |
| "loss": 0.3789, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 1.5984, | |
| "grad_norm": 1.4523001909255981, | |
| "learning_rate": 1.1796683484847731e-06, | |
| "loss": 0.3622, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 1.6, | |
| "grad_norm": 1.3047678470611572, | |
| "learning_rate": 1.1706754345521582e-06, | |
| "loss": 0.4151, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 1.6016, | |
| "grad_norm": 1.352819561958313, | |
| "learning_rate": 1.1617123824617315e-06, | |
| "loss": 0.3736, | |
| "step": 10010 | |
| }, | |
| { | |
| "epoch": 1.6032, | |
| "grad_norm": 2.0007340908050537, | |
| "learning_rate": 1.1527792621091787e-06, | |
| "loss": 0.3852, | |
| "step": 10020 | |
| }, | |
| { | |
| "epoch": 1.6048, | |
| "grad_norm": 1.821365237236023, | |
| "learning_rate": 1.1438761431567641e-06, | |
| "loss": 0.3721, | |
| "step": 10030 | |
| }, | |
| { | |
| "epoch": 1.6064, | |
| "grad_norm": 1.8121904134750366, | |
| "learning_rate": 1.1350030950328001e-06, | |
| "loss": 0.4221, | |
| "step": 10040 | |
| }, | |
| { | |
| "epoch": 1.608, | |
| "grad_norm": 1.660535454750061, | |
| "learning_rate": 1.1261601869310962e-06, | |
| "loss": 0.4033, | |
| "step": 10050 | |
| }, | |
| { | |
| "epoch": 1.6096, | |
| "grad_norm": 1.5814636945724487, | |
| "learning_rate": 1.1173474878104285e-06, | |
| "loss": 0.336, | |
| "step": 10060 | |
| }, | |
| { | |
| "epoch": 1.6112, | |
| "grad_norm": 1.4385359287261963, | |
| "learning_rate": 1.1085650663939933e-06, | |
| "loss": 0.361, | |
| "step": 10070 | |
| }, | |
| { | |
| "epoch": 1.6128, | |
| "grad_norm": 1.5149379968643188, | |
| "learning_rate": 1.0998129911688766e-06, | |
| "loss": 0.3543, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 1.6143999999999998, | |
| "grad_norm": 1.4090138673782349, | |
| "learning_rate": 1.0910913303855208e-06, | |
| "loss": 0.3992, | |
| "step": 10090 | |
| }, | |
| { | |
| "epoch": 1.616, | |
| "grad_norm": 1.626124382019043, | |
| "learning_rate": 1.082400152057187e-06, | |
| "loss": 0.3989, | |
| "step": 10100 | |
| }, | |
| { | |
| "epoch": 1.6176, | |
| "grad_norm": 1.3360874652862549, | |
| "learning_rate": 1.0737395239594318e-06, | |
| "loss": 0.377, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 1.6192, | |
| "grad_norm": 1.9560426473617554, | |
| "learning_rate": 1.0651095136295713e-06, | |
| "loss": 0.4179, | |
| "step": 10120 | |
| }, | |
| { | |
| "epoch": 1.6208, | |
| "grad_norm": 1.7876766920089722, | |
| "learning_rate": 1.05651018836616e-06, | |
| "loss": 0.3768, | |
| "step": 10130 | |
| }, | |
| { | |
| "epoch": 1.6223999999999998, | |
| "grad_norm": 1.450716257095337, | |
| "learning_rate": 1.0479416152284622e-06, | |
| "loss": 0.3878, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 1.624, | |
| "grad_norm": 1.8666528463363647, | |
| "learning_rate": 1.0394038610359352e-06, | |
| "loss": 0.3613, | |
| "step": 10150 | |
| }, | |
| { | |
| "epoch": 1.6256, | |
| "grad_norm": 1.862252950668335, | |
| "learning_rate": 1.0308969923676987e-06, | |
| "loss": 0.373, | |
| "step": 10160 | |
| }, | |
| { | |
| "epoch": 1.6272, | |
| "grad_norm": 1.232593297958374, | |
| "learning_rate": 1.0224210755620257e-06, | |
| "loss": 0.3889, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 1.6288, | |
| "grad_norm": 1.9109057188034058, | |
| "learning_rate": 1.0139761767158158e-06, | |
| "loss": 0.3863, | |
| "step": 10180 | |
| }, | |
| { | |
| "epoch": 1.6303999999999998, | |
| "grad_norm": 1.4961355924606323, | |
| "learning_rate": 1.0055623616840893e-06, | |
| "loss": 0.3826, | |
| "step": 10190 | |
| }, | |
| { | |
| "epoch": 1.6320000000000001, | |
| "grad_norm": 1.5885121822357178, | |
| "learning_rate": 9.971796960794644e-07, | |
| "loss": 0.4288, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 1.6336, | |
| "grad_norm": 1.5879653692245483, | |
| "learning_rate": 9.888282452716507e-07, | |
| "loss": 0.3752, | |
| "step": 10210 | |
| }, | |
| { | |
| "epoch": 1.6352, | |
| "grad_norm": 1.332420825958252, | |
| "learning_rate": 9.805080743869406e-07, | |
| "loss": 0.377, | |
| "step": 10220 | |
| }, | |
| { | |
| "epoch": 1.6368, | |
| "grad_norm": 1.404613971710205, | |
| "learning_rate": 9.722192483076965e-07, | |
| "loss": 0.3717, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 1.6383999999999999, | |
| "grad_norm": 1.5146169662475586, | |
| "learning_rate": 9.639618316718519e-07, | |
| "loss": 0.3396, | |
| "step": 10240 | |
| }, | |
| { | |
| "epoch": 1.6400000000000001, | |
| "grad_norm": 1.7268131971359253, | |
| "learning_rate": 9.557358888723977e-07, | |
| "loss": 0.4072, | |
| "step": 10250 | |
| }, | |
| { | |
| "epoch": 1.6416, | |
| "grad_norm": 1.4972001314163208, | |
| "learning_rate": 9.475414840568903e-07, | |
| "loss": 0.3833, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 1.6432, | |
| "grad_norm": 1.6699305772781372, | |
| "learning_rate": 9.393786811269418e-07, | |
| "loss": 0.4089, | |
| "step": 10270 | |
| }, | |
| { | |
| "epoch": 1.6448, | |
| "grad_norm": 1.567429542541504, | |
| "learning_rate": 9.312475437377322e-07, | |
| "loss": 0.4105, | |
| "step": 10280 | |
| }, | |
| { | |
| "epoch": 1.6463999999999999, | |
| "grad_norm": 1.553330659866333, | |
| "learning_rate": 9.231481352975014e-07, | |
| "loss": 0.4032, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 1.6480000000000001, | |
| "grad_norm": 1.4948378801345825, | |
| "learning_rate": 9.150805189670653e-07, | |
| "loss": 0.3759, | |
| "step": 10300 | |
| }, | |
| { | |
| "epoch": 1.6496, | |
| "grad_norm": 1.4923298358917236, | |
| "learning_rate": 9.070447576593172e-07, | |
| "loss": 0.3886, | |
| "step": 10310 | |
| }, | |
| { | |
| "epoch": 1.6512, | |
| "grad_norm": 1.9077274799346924, | |
| "learning_rate": 8.990409140387374e-07, | |
| "loss": 0.3707, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 1.6528, | |
| "grad_norm": 1.3481954336166382, | |
| "learning_rate": 8.910690505209063e-07, | |
| "loss": 0.3717, | |
| "step": 10330 | |
| }, | |
| { | |
| "epoch": 1.6543999999999999, | |
| "grad_norm": 1.884946346282959, | |
| "learning_rate": 8.831292292720151e-07, | |
| "loss": 0.369, | |
| "step": 10340 | |
| }, | |
| { | |
| "epoch": 1.6560000000000001, | |
| "grad_norm": 1.663649559020996, | |
| "learning_rate": 8.752215122083874e-07, | |
| "loss": 0.3492, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 1.6576, | |
| "grad_norm": 1.922509789466858, | |
| "learning_rate": 8.673459609959872e-07, | |
| "loss": 0.3628, | |
| "step": 10360 | |
| }, | |
| { | |
| "epoch": 1.6592, | |
| "grad_norm": 1.518120288848877, | |
| "learning_rate": 8.595026370499477e-07, | |
| "loss": 0.3799, | |
| "step": 10370 | |
| }, | |
| { | |
| "epoch": 1.6608, | |
| "grad_norm": 1.713357925415039, | |
| "learning_rate": 8.516916015340826e-07, | |
| "loss": 0.3813, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 1.6623999999999999, | |
| "grad_norm": 1.514403223991394, | |
| "learning_rate": 8.439129153604148e-07, | |
| "loss": 0.3645, | |
| "step": 10390 | |
| }, | |
| { | |
| "epoch": 1.6640000000000001, | |
| "grad_norm": 2.057803153991699, | |
| "learning_rate": 8.361666391887047e-07, | |
| "loss": 0.4249, | |
| "step": 10400 | |
| }, | |
| { | |
| "epoch": 1.6656, | |
| "grad_norm": 1.480934500694275, | |
| "learning_rate": 8.284528334259667e-07, | |
| "loss": 0.3615, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 1.6672, | |
| "grad_norm": 1.8613368272781372, | |
| "learning_rate": 8.207715582260112e-07, | |
| "loss": 0.3949, | |
| "step": 10420 | |
| }, | |
| { | |
| "epoch": 1.6688, | |
| "grad_norm": 1.760693073272705, | |
| "learning_rate": 8.131228734889618e-07, | |
| "loss": 0.4143, | |
| "step": 10430 | |
| }, | |
| { | |
| "epoch": 1.6703999999999999, | |
| "grad_norm": 1.948075532913208, | |
| "learning_rate": 8.055068388608011e-07, | |
| "loss": 0.4005, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 1.6720000000000002, | |
| "grad_norm": 1.2073793411254883, | |
| "learning_rate": 7.979235137328961e-07, | |
| "loss": 0.3635, | |
| "step": 10450 | |
| }, | |
| { | |
| "epoch": 1.6736, | |
| "grad_norm": 1.843197226524353, | |
| "learning_rate": 7.903729572415397e-07, | |
| "loss": 0.3961, | |
| "step": 10460 | |
| }, | |
| { | |
| "epoch": 1.6752, | |
| "grad_norm": 1.7495695352554321, | |
| "learning_rate": 7.828552282674867e-07, | |
| "loss": 0.3905, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 1.6768, | |
| "grad_norm": 1.5082989931106567, | |
| "learning_rate": 7.753703854354999e-07, | |
| "loss": 0.3647, | |
| "step": 10480 | |
| }, | |
| { | |
| "epoch": 1.6784, | |
| "grad_norm": 1.2426806688308716, | |
| "learning_rate": 7.679184871138851e-07, | |
| "loss": 0.3686, | |
| "step": 10490 | |
| }, | |
| { | |
| "epoch": 1.6800000000000002, | |
| "grad_norm": 1.6325470209121704, | |
| "learning_rate": 7.60499591414045e-07, | |
| "loss": 0.3561, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 1.6816, | |
| "grad_norm": 1.4102387428283691, | |
| "learning_rate": 7.53113756190017e-07, | |
| "loss": 0.3894, | |
| "step": 10510 | |
| }, | |
| { | |
| "epoch": 1.6832, | |
| "grad_norm": 1.6540184020996094, | |
| "learning_rate": 7.457610390380265e-07, | |
| "loss": 0.363, | |
| "step": 10520 | |
| }, | |
| { | |
| "epoch": 1.6848, | |
| "grad_norm": 1.9609912633895874, | |
| "learning_rate": 7.384414972960419e-07, | |
| "loss": 0.3788, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 1.6864, | |
| "grad_norm": 1.4535493850708008, | |
| "learning_rate": 7.311551880433171e-07, | |
| "loss": 0.357, | |
| "step": 10540 | |
| }, | |
| { | |
| "epoch": 1.688, | |
| "grad_norm": 1.4074710607528687, | |
| "learning_rate": 7.239021680999575e-07, | |
| "loss": 0.3431, | |
| "step": 10550 | |
| }, | |
| { | |
| "epoch": 1.6896, | |
| "grad_norm": 1.5304536819458008, | |
| "learning_rate": 7.166824940264683e-07, | |
| "loss": 0.3558, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 1.6912, | |
| "grad_norm": 1.597450852394104, | |
| "learning_rate": 7.094962221233192e-07, | |
| "loss": 0.3828, | |
| "step": 10570 | |
| }, | |
| { | |
| "epoch": 1.6928, | |
| "grad_norm": 1.4672627449035645, | |
| "learning_rate": 7.02343408430502e-07, | |
| "loss": 0.3785, | |
| "step": 10580 | |
| }, | |
| { | |
| "epoch": 1.6944, | |
| "grad_norm": 1.5955575704574585, | |
| "learning_rate": 6.952241087270938e-07, | |
| "loss": 0.3799, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 1.696, | |
| "grad_norm": 1.627501130104065, | |
| "learning_rate": 6.881383785308232e-07, | |
| "loss": 0.4139, | |
| "step": 10600 | |
| }, | |
| { | |
| "epoch": 1.6976, | |
| "grad_norm": 1.7322561740875244, | |
| "learning_rate": 6.810862730976392e-07, | |
| "loss": 0.3871, | |
| "step": 10610 | |
| }, | |
| { | |
| "epoch": 1.6992, | |
| "grad_norm": 1.359447717666626, | |
| "learning_rate": 6.74067847421277e-07, | |
| "loss": 0.3434, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 1.7008, | |
| "grad_norm": 1.6238139867782593, | |
| "learning_rate": 6.67083156232829e-07, | |
| "loss": 0.3698, | |
| "step": 10630 | |
| }, | |
| { | |
| "epoch": 1.7024, | |
| "grad_norm": 1.4961482286453247, | |
| "learning_rate": 6.601322540003202e-07, | |
| "loss": 0.3928, | |
| "step": 10640 | |
| }, | |
| { | |
| "epoch": 1.704, | |
| "grad_norm": 1.577006459236145, | |
| "learning_rate": 6.532151949282811e-07, | |
| "loss": 0.3463, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 1.7056, | |
| "grad_norm": 1.6705478429794312, | |
| "learning_rate": 6.463320329573303e-07, | |
| "loss": 0.4067, | |
| "step": 10660 | |
| }, | |
| { | |
| "epoch": 1.7072, | |
| "grad_norm": 1.5644800662994385, | |
| "learning_rate": 6.394828217637455e-07, | |
| "loss": 0.3574, | |
| "step": 10670 | |
| }, | |
| { | |
| "epoch": 1.7088, | |
| "grad_norm": 1.5178284645080566, | |
| "learning_rate": 6.326676147590533e-07, | |
| "loss": 0.3706, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 1.7104, | |
| "grad_norm": 1.4503835439682007, | |
| "learning_rate": 6.258864650896051e-07, | |
| "loss": 0.3708, | |
| "step": 10690 | |
| }, | |
| { | |
| "epoch": 1.712, | |
| "grad_norm": 1.6232348680496216, | |
| "learning_rate": 6.191394256361699e-07, | |
| "loss": 0.4011, | |
| "step": 10700 | |
| }, | |
| { | |
| "epoch": 1.7136, | |
| "grad_norm": 1.4016155004501343, | |
| "learning_rate": 6.124265490135161e-07, | |
| "loss": 0.3962, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 1.7151999999999998, | |
| "grad_norm": 2.123457431793213, | |
| "learning_rate": 6.057478875700035e-07, | |
| "loss": 0.4026, | |
| "step": 10720 | |
| }, | |
| { | |
| "epoch": 1.7168, | |
| "grad_norm": 1.6403712034225464, | |
| "learning_rate": 5.991034933871764e-07, | |
| "loss": 0.3487, | |
| "step": 10730 | |
| }, | |
| { | |
| "epoch": 1.7184, | |
| "grad_norm": 1.5305230617523193, | |
| "learning_rate": 5.92493418279354e-07, | |
| "loss": 0.3858, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 1.72, | |
| "grad_norm": 1.749670147895813, | |
| "learning_rate": 5.859177137932315e-07, | |
| "loss": 0.342, | |
| "step": 10750 | |
| }, | |
| { | |
| "epoch": 1.7216, | |
| "grad_norm": 1.7683908939361572, | |
| "learning_rate": 5.793764312074735e-07, | |
| "loss": 0.3677, | |
| "step": 10760 | |
| }, | |
| { | |
| "epoch": 1.7231999999999998, | |
| "grad_norm": 1.8332089185714722, | |
| "learning_rate": 5.728696215323143e-07, | |
| "loss": 0.3929, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 1.7248, | |
| "grad_norm": 1.893444538116455, | |
| "learning_rate": 5.663973355091624e-07, | |
| "loss": 0.381, | |
| "step": 10780 | |
| }, | |
| { | |
| "epoch": 1.7264, | |
| "grad_norm": 1.8476930856704712, | |
| "learning_rate": 5.599596236102068e-07, | |
| "loss": 0.3643, | |
| "step": 10790 | |
| }, | |
| { | |
| "epoch": 1.728, | |
| "grad_norm": 1.5751250982284546, | |
| "learning_rate": 5.535565360380146e-07, | |
| "loss": 0.3543, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 1.7296, | |
| "grad_norm": 1.8975834846496582, | |
| "learning_rate": 5.471881227251518e-07, | |
| "loss": 0.3749, | |
| "step": 10810 | |
| }, | |
| { | |
| "epoch": 1.7311999999999999, | |
| "grad_norm": 1.4232405424118042, | |
| "learning_rate": 5.408544333337845e-07, | |
| "loss": 0.3659, | |
| "step": 10820 | |
| }, | |
| { | |
| "epoch": 1.7328000000000001, | |
| "grad_norm": 1.7107577323913574, | |
| "learning_rate": 5.345555172552941e-07, | |
| "loss": 0.3537, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 1.7344, | |
| "grad_norm": 1.8169152736663818, | |
| "learning_rate": 5.28291423609894e-07, | |
| "loss": 0.369, | |
| "step": 10840 | |
| }, | |
| { | |
| "epoch": 1.736, | |
| "grad_norm": 1.451088547706604, | |
| "learning_rate": 5.220622012462429e-07, | |
| "loss": 0.3981, | |
| "step": 10850 | |
| }, | |
| { | |
| "epoch": 1.7376, | |
| "grad_norm": 1.5496747493743896, | |
| "learning_rate": 5.15867898741071e-07, | |
| "loss": 0.3314, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 1.7391999999999999, | |
| "grad_norm": 1.4566885232925415, | |
| "learning_rate": 5.09708564398791e-07, | |
| "loss": 0.3625, | |
| "step": 10870 | |
| }, | |
| { | |
| "epoch": 1.7408000000000001, | |
| "grad_norm": 1.9700531959533691, | |
| "learning_rate": 5.035842462511309e-07, | |
| "loss": 0.3571, | |
| "step": 10880 | |
| }, | |
| { | |
| "epoch": 1.7424, | |
| "grad_norm": 2.2332420349121094, | |
| "learning_rate": 4.97494992056754e-07, | |
| "loss": 0.3875, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 1.744, | |
| "grad_norm": 1.8007875680923462, | |
| "learning_rate": 4.914408493008871e-07, | |
| "loss": 0.3617, | |
| "step": 10900 | |
| }, | |
| { | |
| "epoch": 1.7456, | |
| "grad_norm": 1.295969843864441, | |
| "learning_rate": 4.85421865194951e-07, | |
| "loss": 0.3461, | |
| "step": 10910 | |
| }, | |
| { | |
| "epoch": 1.7471999999999999, | |
| "grad_norm": 1.4639779329299927, | |
| "learning_rate": 4.794380866761928e-07, | |
| "loss": 0.3617, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 1.7488000000000001, | |
| "grad_norm": 1.647478699684143, | |
| "learning_rate": 4.734895604073214e-07, | |
| "loss": 0.4066, | |
| "step": 10930 | |
| }, | |
| { | |
| "epoch": 1.7504, | |
| "grad_norm": 1.9526691436767578, | |
| "learning_rate": 4.6757633277613734e-07, | |
| "loss": 0.4198, | |
| "step": 10940 | |
| }, | |
| { | |
| "epoch": 1.752, | |
| "grad_norm": 1.6654764413833618, | |
| "learning_rate": 4.616984498951793e-07, | |
| "loss": 0.3591, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 1.7536, | |
| "grad_norm": 1.588196039199829, | |
| "learning_rate": 4.5585595760135825e-07, | |
| "loss": 0.3839, | |
| "step": 10960 | |
| }, | |
| { | |
| "epoch": 1.7551999999999999, | |
| "grad_norm": 1.331149697303772, | |
| "learning_rate": 4.50048901455602e-07, | |
| "loss": 0.3567, | |
| "step": 10970 | |
| }, | |
| { | |
| "epoch": 1.7568000000000001, | |
| "grad_norm": 1.6753369569778442, | |
| "learning_rate": 4.4427732674250045e-07, | |
| "loss": 0.4031, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 1.7584, | |
| "grad_norm": 1.389146089553833, | |
| "learning_rate": 4.385412784699544e-07, | |
| "loss": 0.3414, | |
| "step": 10990 | |
| }, | |
| { | |
| "epoch": 1.76, | |
| "grad_norm": 1.635467290878296, | |
| "learning_rate": 4.3284080136881847e-07, | |
| "loss": 0.4173, | |
| "step": 11000 | |
| }, | |
| { | |
| "epoch": 1.7616, | |
| "grad_norm": 1.63181471824646, | |
| "learning_rate": 4.271759398925601e-07, | |
| "loss": 0.3556, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 1.7631999999999999, | |
| "grad_norm": 1.492443561553955, | |
| "learning_rate": 4.2154673821690585e-07, | |
| "loss": 0.4263, | |
| "step": 11020 | |
| }, | |
| { | |
| "epoch": 1.7648000000000001, | |
| "grad_norm": 1.5528181791305542, | |
| "learning_rate": 4.159532402395011e-07, | |
| "loss": 0.3953, | |
| "step": 11030 | |
| }, | |
| { | |
| "epoch": 1.7664, | |
| "grad_norm": 1.9448027610778809, | |
| "learning_rate": 4.1039548957956807e-07, | |
| "loss": 0.3745, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 1.768, | |
| "grad_norm": 1.743765115737915, | |
| "learning_rate": 4.048735295775608e-07, | |
| "loss": 0.3652, | |
| "step": 11050 | |
| }, | |
| { | |
| "epoch": 1.7696, | |
| "grad_norm": 1.3443995714187622, | |
| "learning_rate": 3.9938740329483473e-07, | |
| "loss": 0.3784, | |
| "step": 11060 | |
| }, | |
| { | |
| "epoch": 1.7711999999999999, | |
| "grad_norm": 1.7259010076522827, | |
| "learning_rate": 3.9393715351330243e-07, | |
| "loss": 0.3834, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 1.7728000000000002, | |
| "grad_norm": 1.3750628232955933, | |
| "learning_rate": 3.88522822735109e-07, | |
| "loss": 0.375, | |
| "step": 11080 | |
| }, | |
| { | |
| "epoch": 1.7744, | |
| "grad_norm": 1.5124554634094238, | |
| "learning_rate": 3.83144453182292e-07, | |
| "loss": 0.4068, | |
| "step": 11090 | |
| }, | |
| { | |
| "epoch": 1.776, | |
| "grad_norm": 1.3854613304138184, | |
| "learning_rate": 3.7780208679645826e-07, | |
| "loss": 0.3416, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 1.7776, | |
| "grad_norm": 1.6818737983703613, | |
| "learning_rate": 3.72495765238452e-07, | |
| "loss": 0.4129, | |
| "step": 11110 | |
| }, | |
| { | |
| "epoch": 1.7792, | |
| "grad_norm": 1.854150652885437, | |
| "learning_rate": 3.672255298880367e-07, | |
| "loss": 0.3926, | |
| "step": 11120 | |
| }, | |
| { | |
| "epoch": 1.7808000000000002, | |
| "grad_norm": 1.5101863145828247, | |
| "learning_rate": 3.619914218435666e-07, | |
| "loss": 0.3698, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 1.7824, | |
| "grad_norm": 2.327683687210083, | |
| "learning_rate": 3.5679348192166675e-07, | |
| "loss": 0.3634, | |
| "step": 11140 | |
| }, | |
| { | |
| "epoch": 1.784, | |
| "grad_norm": 1.7254990339279175, | |
| "learning_rate": 3.516317506569172e-07, | |
| "loss": 0.384, | |
| "step": 11150 | |
| }, | |
| { | |
| "epoch": 1.7856, | |
| "grad_norm": 1.6353331804275513, | |
| "learning_rate": 3.465062683015341e-07, | |
| "loss": 0.4179, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 1.7872, | |
| "grad_norm": 1.6848180294036865, | |
| "learning_rate": 3.4141707482506056e-07, | |
| "loss": 0.403, | |
| "step": 11170 | |
| }, | |
| { | |
| "epoch": 1.7888, | |
| "grad_norm": 1.6265270709991455, | |
| "learning_rate": 3.3636420991404686e-07, | |
| "loss": 0.3253, | |
| "step": 11180 | |
| }, | |
| { | |
| "epoch": 1.7904, | |
| "grad_norm": 1.3600866794586182, | |
| "learning_rate": 3.3134771297175127e-07, | |
| "loss": 0.3814, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 1.792, | |
| "grad_norm": 1.7088428735733032, | |
| "learning_rate": 3.263676231178231e-07, | |
| "loss": 0.376, | |
| "step": 11200 | |
| }, | |
| { | |
| "epoch": 1.7936, | |
| "grad_norm": 1.9018006324768066, | |
| "learning_rate": 3.2142397918800416e-07, | |
| "loss": 0.3556, | |
| "step": 11210 | |
| }, | |
| { | |
| "epoch": 1.7952, | |
| "grad_norm": 1.4532266855239868, | |
| "learning_rate": 3.165168197338231e-07, | |
| "loss": 0.3601, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 1.7968, | |
| "grad_norm": 2.064441442489624, | |
| "learning_rate": 3.116461830222933e-07, | |
| "loss": 0.4238, | |
| "step": 11230 | |
| }, | |
| { | |
| "epoch": 1.7984, | |
| "grad_norm": 1.6068156957626343, | |
| "learning_rate": 3.068121070356206e-07, | |
| "loss": 0.343, | |
| "step": 11240 | |
| }, | |
| { | |
| "epoch": 1.8, | |
| "grad_norm": 1.518755555152893, | |
| "learning_rate": 3.0201462947089865e-07, | |
| "loss": 0.3723, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 1.8016, | |
| "grad_norm": 1.7997151613235474, | |
| "learning_rate": 2.9725378773982295e-07, | |
| "loss": 0.3856, | |
| "step": 11260 | |
| }, | |
| { | |
| "epoch": 1.8032, | |
| "grad_norm": 1.9629850387573242, | |
| "learning_rate": 2.9252961896839236e-07, | |
| "loss": 0.3855, | |
| "step": 11270 | |
| }, | |
| { | |
| "epoch": 1.8048, | |
| "grad_norm": 1.6617012023925781, | |
| "learning_rate": 2.878421599966252e-07, | |
| "loss": 0.3943, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 1.8064, | |
| "grad_norm": 1.8224092721939087, | |
| "learning_rate": 2.83191447378266e-07, | |
| "loss": 0.4093, | |
| "step": 11290 | |
| }, | |
| { | |
| "epoch": 1.808, | |
| "grad_norm": 1.5975474119186401, | |
| "learning_rate": 2.785775173805083e-07, | |
| "loss": 0.3762, | |
| "step": 11300 | |
| }, | |
| { | |
| "epoch": 1.8096, | |
| "grad_norm": 1.5036040544509888, | |
| "learning_rate": 2.740004059837031e-07, | |
| "loss": 0.3682, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 1.8112, | |
| "grad_norm": 1.499791145324707, | |
| "learning_rate": 2.694601488810855e-07, | |
| "loss": 0.3789, | |
| "step": 11320 | |
| }, | |
| { | |
| "epoch": 1.8128, | |
| "grad_norm": 1.253347396850586, | |
| "learning_rate": 2.649567814784937e-07, | |
| "loss": 0.3373, | |
| "step": 11330 | |
| }, | |
| { | |
| "epoch": 1.8144, | |
| "grad_norm": 1.5301799774169922, | |
| "learning_rate": 2.604903388940899e-07, | |
| "loss": 0.3555, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 1.8159999999999998, | |
| "grad_norm": 1.3616042137145996, | |
| "learning_rate": 2.5606085595809015e-07, | |
| "loss": 0.3711, | |
| "step": 11350 | |
| }, | |
| { | |
| "epoch": 1.8176, | |
| "grad_norm": 1.5854843854904175, | |
| "learning_rate": 2.5166836721249254e-07, | |
| "loss": 0.365, | |
| "step": 11360 | |
| }, | |
| { | |
| "epoch": 1.8192, | |
| "grad_norm": 1.5927246809005737, | |
| "learning_rate": 2.4731290691080766e-07, | |
| "loss": 0.3889, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 1.8208, | |
| "grad_norm": 1.7462496757507324, | |
| "learning_rate": 2.429945090177888e-07, | |
| "loss": 0.4279, | |
| "step": 11380 | |
| }, | |
| { | |
| "epoch": 1.8224, | |
| "grad_norm": 1.5840808153152466, | |
| "learning_rate": 2.387132072091708e-07, | |
| "loss": 0.3415, | |
| "step": 11390 | |
| }, | |
| { | |
| "epoch": 1.8239999999999998, | |
| "grad_norm": 1.6024113893508911, | |
| "learning_rate": 2.344690348714046e-07, | |
| "loss": 0.3874, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 1.8256000000000001, | |
| "grad_norm": 1.9357134103775024, | |
| "learning_rate": 2.3026202510139928e-07, | |
| "loss": 0.4067, | |
| "step": 11410 | |
| }, | |
| { | |
| "epoch": 1.8272, | |
| "grad_norm": 1.5605065822601318, | |
| "learning_rate": 2.2609221070626132e-07, | |
| "loss": 0.3849, | |
| "step": 11420 | |
| }, | |
| { | |
| "epoch": 1.8288, | |
| "grad_norm": 2.1521353721618652, | |
| "learning_rate": 2.2195962420304083e-07, | |
| "loss": 0.3867, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 1.8304, | |
| "grad_norm": 1.2614628076553345, | |
| "learning_rate": 2.1786429781847972e-07, | |
| "loss": 0.3663, | |
| "step": 11440 | |
| }, | |
| { | |
| "epoch": 1.8319999999999999, | |
| "grad_norm": 1.4092823266983032, | |
| "learning_rate": 2.1380626348875278e-07, | |
| "loss": 0.3684, | |
| "step": 11450 | |
| }, | |
| { | |
| "epoch": 1.8336000000000001, | |
| "grad_norm": 1.652994990348816, | |
| "learning_rate": 2.0978555285922963e-07, | |
| "loss": 0.3931, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 1.8352, | |
| "grad_norm": 2.124891996383667, | |
| "learning_rate": 2.058021972842178e-07, | |
| "loss": 0.41, | |
| "step": 11470 | |
| }, | |
| { | |
| "epoch": 1.8368, | |
| "grad_norm": 2.1776928901672363, | |
| "learning_rate": 2.0185622782672497e-07, | |
| "loss": 0.3999, | |
| "step": 11480 | |
| }, | |
| { | |
| "epoch": 1.8384, | |
| "grad_norm": 1.6700326204299927, | |
| "learning_rate": 1.9794767525821212e-07, | |
| "loss": 0.4369, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 1.8399999999999999, | |
| "grad_norm": 1.4016342163085938, | |
| "learning_rate": 1.9407657005835967e-07, | |
| "loss": 0.3783, | |
| "step": 11500 | |
| }, | |
| { | |
| "epoch": 1.8416000000000001, | |
| "grad_norm": 1.7040356397628784, | |
| "learning_rate": 1.9024294241482112e-07, | |
| "loss": 0.3766, | |
| "step": 11510 | |
| }, | |
| { | |
| "epoch": 1.8432, | |
| "grad_norm": 1.408155918121338, | |
| "learning_rate": 1.8644682222299703e-07, | |
| "loss": 0.3491, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 1.8448, | |
| "grad_norm": 1.7515614032745361, | |
| "learning_rate": 1.826882390857948e-07, | |
| "loss": 0.4108, | |
| "step": 11530 | |
| }, | |
| { | |
| "epoch": 1.8464, | |
| "grad_norm": 2.0291240215301514, | |
| "learning_rate": 1.7896722231339925e-07, | |
| "loss": 0.3932, | |
| "step": 11540 | |
| }, | |
| { | |
| "epoch": 1.8479999999999999, | |
| "grad_norm": 1.5587979555130005, | |
| "learning_rate": 1.7528380092304842e-07, | |
| "loss": 0.3974, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 1.8496000000000001, | |
| "grad_norm": 1.438423991203308, | |
| "learning_rate": 1.7163800363880102e-07, | |
| "loss": 0.4239, | |
| "step": 11560 | |
| }, | |
| { | |
| "epoch": 1.8512, | |
| "grad_norm": 1.5600086450576782, | |
| "learning_rate": 1.6802985889131762e-07, | |
| "loss": 0.3637, | |
| "step": 11570 | |
| }, | |
| { | |
| "epoch": 1.8528, | |
| "grad_norm": 1.3395024538040161, | |
| "learning_rate": 1.644593948176354e-07, | |
| "loss": 0.4187, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 1.8544, | |
| "grad_norm": 2.943678617477417, | |
| "learning_rate": 1.6092663926094987e-07, | |
| "loss": 0.3772, | |
| "step": 11590 | |
| }, | |
| { | |
| "epoch": 1.8559999999999999, | |
| "grad_norm": 1.837965965270996, | |
| "learning_rate": 1.5743161977039954e-07, | |
| "loss": 0.4144, | |
| "step": 11600 | |
| }, | |
| { | |
| "epoch": 1.8576000000000001, | |
| "grad_norm": 1.7905504703521729, | |
| "learning_rate": 1.5397436360084784e-07, | |
| "loss": 0.3779, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 1.8592, | |
| "grad_norm": 1.7444462776184082, | |
| "learning_rate": 1.5055489771267252e-07, | |
| "loss": 0.3381, | |
| "step": 11620 | |
| }, | |
| { | |
| "epoch": 1.8608, | |
| "grad_norm": 1.8212355375289917, | |
| "learning_rate": 1.4717324877155603e-07, | |
| "loss": 0.3821, | |
| "step": 11630 | |
| }, | |
| { | |
| "epoch": 1.8624, | |
| "grad_norm": 1.5091392993927002, | |
| "learning_rate": 1.438294431482762e-07, | |
| "loss": 0.3618, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 1.8639999999999999, | |
| "grad_norm": 1.5460602045059204, | |
| "learning_rate": 1.405235069185007e-07, | |
| "loss": 0.3168, | |
| "step": 11650 | |
| }, | |
| { | |
| "epoch": 1.8656000000000001, | |
| "grad_norm": 1.2751076221466064, | |
| "learning_rate": 1.3725546586258464e-07, | |
| "loss": 0.3609, | |
| "step": 11660 | |
| }, | |
| { | |
| "epoch": 1.8672, | |
| "grad_norm": 1.6240779161453247, | |
| "learning_rate": 1.3402534546536783e-07, | |
| "loss": 0.3655, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 1.8688, | |
| "grad_norm": 1.3701972961425781, | |
| "learning_rate": 1.3083317091597936e-07, | |
| "loss": 0.3587, | |
| "step": 11680 | |
| }, | |
| { | |
| "epoch": 1.8704, | |
| "grad_norm": 1.626521110534668, | |
| "learning_rate": 1.2767896710763616e-07, | |
| "loss": 0.35, | |
| "step": 11690 | |
| }, | |
| { | |
| "epoch": 1.8719999999999999, | |
| "grad_norm": 1.3935329914093018, | |
| "learning_rate": 1.2456275863745426e-07, | |
| "loss": 0.364, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 1.8736000000000002, | |
| "grad_norm": 1.6910933256149292, | |
| "learning_rate": 1.2148456980625223e-07, | |
| "loss": 0.3897, | |
| "step": 11710 | |
| }, | |
| { | |
| "epoch": 1.8752, | |
| "grad_norm": 1.61421799659729, | |
| "learning_rate": 1.1844442461836636e-07, | |
| "loss": 0.382, | |
| "step": 11720 | |
| }, | |
| { | |
| "epoch": 1.8768, | |
| "grad_norm": 1.3847222328186035, | |
| "learning_rate": 1.1544234678145805e-07, | |
| "loss": 0.3873, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 1.8784, | |
| "grad_norm": 1.7014695405960083, | |
| "learning_rate": 1.1247835970633392e-07, | |
| "loss": 0.3479, | |
| "step": 11740 | |
| }, | |
| { | |
| "epoch": 1.88, | |
| "grad_norm": 2.417046308517456, | |
| "learning_rate": 1.0955248650676154e-07, | |
| "loss": 0.4138, | |
| "step": 11750 | |
| }, | |
| { | |
| "epoch": 1.8816000000000002, | |
| "grad_norm": 1.47307288646698, | |
| "learning_rate": 1.0666474999928566e-07, | |
| "loss": 0.3872, | |
| "step": 11760 | |
| }, | |
| { | |
| "epoch": 1.8832, | |
| "grad_norm": 2.0413029193878174, | |
| "learning_rate": 1.0381517270305786e-07, | |
| "loss": 0.4124, | |
| "step": 11770 | |
| }, | |
| { | |
| "epoch": 1.8848, | |
| "grad_norm": 1.732094168663025, | |
| "learning_rate": 1.0100377683965323e-07, | |
| "loss": 0.3723, | |
| "step": 11780 | |
| }, | |
| { | |
| "epoch": 1.8864, | |
| "grad_norm": 1.3991636037826538, | |
| "learning_rate": 9.823058433290178e-08, | |
| "loss": 0.3926, | |
| "step": 11790 | |
| }, | |
| { | |
| "epoch": 1.888, | |
| "grad_norm": 1.6076829433441162, | |
| "learning_rate": 9.549561680871566e-08, | |
| "loss": 0.3681, | |
| "step": 11800 | |
| }, | |
| { | |
| "epoch": 1.8896, | |
| "grad_norm": 1.7450711727142334, | |
| "learning_rate": 9.27988955949205e-08, | |
| "loss": 0.3386, | |
| "step": 11810 | |
| }, | |
| { | |
| "epoch": 1.8912, | |
| "grad_norm": 1.7144626379013062, | |
| "learning_rate": 9.014044172109049e-08, | |
| "loss": 0.36, | |
| "step": 11820 | |
| }, | |
| { | |
| "epoch": 1.8928, | |
| "grad_norm": 1.3775994777679443, | |
| "learning_rate": 8.752027591838352e-08, | |
| "loss": 0.3681, | |
| "step": 11830 | |
| }, | |
| { | |
| "epoch": 1.8944, | |
| "grad_norm": 1.9273160696029663, | |
| "learning_rate": 8.493841861937802e-08, | |
| "loss": 0.4383, | |
| "step": 11840 | |
| }, | |
| { | |
| "epoch": 1.896, | |
| "grad_norm": 1.7852745056152344, | |
| "learning_rate": 8.239488995791633e-08, | |
| "loss": 0.4134, | |
| "step": 11850 | |
| }, | |
| { | |
| "epoch": 1.8976, | |
| "grad_norm": 1.7993406057357788, | |
| "learning_rate": 7.988970976894605e-08, | |
| "loss": 0.3585, | |
| "step": 11860 | |
| }, | |
| { | |
| "epoch": 1.8992, | |
| "grad_norm": 1.8110463619232178, | |
| "learning_rate": 7.742289758836452e-08, | |
| "loss": 0.3941, | |
| "step": 11870 | |
| }, | |
| { | |
| "epoch": 1.9008, | |
| "grad_norm": 1.2032711505889893, | |
| "learning_rate": 7.499447265286952e-08, | |
| "loss": 0.4027, | |
| "step": 11880 | |
| }, | |
| { | |
| "epoch": 1.9024, | |
| "grad_norm": 1.454171061515808, | |
| "learning_rate": 7.260445389980609e-08, | |
| "loss": 0.384, | |
| "step": 11890 | |
| }, | |
| { | |
| "epoch": 1.904, | |
| "grad_norm": 1.7370083332061768, | |
| "learning_rate": 7.025285996702158e-08, | |
| "loss": 0.4031, | |
| "step": 11900 | |
| }, | |
| { | |
| "epoch": 1.9056, | |
| "grad_norm": 1.583922028541565, | |
| "learning_rate": 6.793970919271642e-08, | |
| "loss": 0.3892, | |
| "step": 11910 | |
| }, | |
| { | |
| "epoch": 1.9072, | |
| "grad_norm": 1.2207536697387695, | |
| "learning_rate": 6.566501961530636e-08, | |
| "loss": 0.3417, | |
| "step": 11920 | |
| }, | |
| { | |
| "epoch": 1.9088, | |
| "grad_norm": 1.2948083877563477, | |
| "learning_rate": 6.342880897327597e-08, | |
| "loss": 0.3543, | |
| "step": 11930 | |
| }, | |
| { | |
| "epoch": 1.9104, | |
| "grad_norm": 1.6116557121276855, | |
| "learning_rate": 6.12310947050465e-08, | |
| "loss": 0.3869, | |
| "step": 11940 | |
| }, | |
| { | |
| "epoch": 1.912, | |
| "grad_norm": 1.3642340898513794, | |
| "learning_rate": 5.9071893948835505e-08, | |
| "loss": 0.3565, | |
| "step": 11950 | |
| }, | |
| { | |
| "epoch": 1.9136, | |
| "grad_norm": 1.4018608331680298, | |
| "learning_rate": 5.6951223542522915e-08, | |
| "loss": 0.4061, | |
| "step": 11960 | |
| }, | |
| { | |
| "epoch": 1.9152, | |
| "grad_norm": 1.4921318292617798, | |
| "learning_rate": 5.4869100023523526e-08, | |
| "loss": 0.3269, | |
| "step": 11970 | |
| }, | |
| { | |
| "epoch": 1.9167999999999998, | |
| "grad_norm": 1.4321351051330566, | |
| "learning_rate": 5.282553962865422e-08, | |
| "loss": 0.4137, | |
| "step": 11980 | |
| }, | |
| { | |
| "epoch": 1.9184, | |
| "grad_norm": 1.956453561782837, | |
| "learning_rate": 5.082055829400967e-08, | |
| "loss": 0.4109, | |
| "step": 11990 | |
| }, | |
| { | |
| "epoch": 1.92, | |
| "grad_norm": 1.5263909101486206, | |
| "learning_rate": 4.885417165483741e-08, | |
| "loss": 0.3375, | |
| "step": 12000 | |
| }, | |
| { | |
| "epoch": 1.9216, | |
| "grad_norm": 1.9736378192901611, | |
| "learning_rate": 4.692639504541518e-08, | |
| "loss": 0.4053, | |
| "step": 12010 | |
| }, | |
| { | |
| "epoch": 1.9232, | |
| "grad_norm": 1.4543331861495972, | |
| "learning_rate": 4.503724349893157e-08, | |
| "loss": 0.3792, | |
| "step": 12020 | |
| }, | |
| { | |
| "epoch": 1.9247999999999998, | |
| "grad_norm": 1.6217784881591797, | |
| "learning_rate": 4.318673174737109e-08, | |
| "loss": 0.374, | |
| "step": 12030 | |
| }, | |
| { | |
| "epoch": 1.9264000000000001, | |
| "grad_norm": 1.4804718494415283, | |
| "learning_rate": 4.137487422139541e-08, | |
| "loss": 0.4202, | |
| "step": 12040 | |
| }, | |
| { | |
| "epoch": 1.928, | |
| "grad_norm": 1.6179620027542114, | |
| "learning_rate": 3.960168505023343e-08, | |
| "loss": 0.3865, | |
| "step": 12050 | |
| }, | |
| { | |
| "epoch": 1.9296, | |
| "grad_norm": 1.618943691253662, | |
| "learning_rate": 3.786717806157136e-08, | |
| "loss": 0.3624, | |
| "step": 12060 | |
| }, | |
| { | |
| "epoch": 1.9312, | |
| "grad_norm": 1.5869625806808472, | |
| "learning_rate": 3.617136678144173e-08, | |
| "loss": 0.3878, | |
| "step": 12070 | |
| }, | |
| { | |
| "epoch": 1.9327999999999999, | |
| "grad_norm": 1.366064190864563, | |
| "learning_rate": 3.451426443412231e-08, | |
| "loss": 0.3704, | |
| "step": 12080 | |
| }, | |
| { | |
| "epoch": 1.9344000000000001, | |
| "grad_norm": 1.835972547531128, | |
| "learning_rate": 3.289588394203014e-08, | |
| "loss": 0.4323, | |
| "step": 12090 | |
| }, | |
| { | |
| "epoch": 1.936, | |
| "grad_norm": 1.5419681072235107, | |
| "learning_rate": 3.131623792562155e-08, | |
| "loss": 0.3528, | |
| "step": 12100 | |
| }, | |
| { | |
| "epoch": 1.9376, | |
| "grad_norm": 1.330670714378357, | |
| "learning_rate": 2.97753387032923e-08, | |
| "loss": 0.3746, | |
| "step": 12110 | |
| }, | |
| { | |
| "epoch": 1.9392, | |
| "grad_norm": 1.853590726852417, | |
| "learning_rate": 2.827319829128594e-08, | |
| "loss": 0.3943, | |
| "step": 12120 | |
| }, | |
| { | |
| "epoch": 1.9407999999999999, | |
| "grad_norm": 1.4271591901779175, | |
| "learning_rate": 2.6809828403593363e-08, | |
| "loss": 0.3933, | |
| "step": 12130 | |
| }, | |
| { | |
| "epoch": 1.9424000000000001, | |
| "grad_norm": 1.4933634996414185, | |
| "learning_rate": 2.5385240451867853e-08, | |
| "loss": 0.3714, | |
| "step": 12140 | |
| }, | |
| { | |
| "epoch": 1.944, | |
| "grad_norm": 1.6965960264205933, | |
| "learning_rate": 2.3999445545332955e-08, | |
| "loss": 0.3352, | |
| "step": 12150 | |
| }, | |
| { | |
| "epoch": 1.9456, | |
| "grad_norm": 1.6812845468521118, | |
| "learning_rate": 2.2652454490694752e-08, | |
| "loss": 0.3648, | |
| "step": 12160 | |
| }, | |
| { | |
| "epoch": 1.9472, | |
| "grad_norm": 1.4611587524414062, | |
| "learning_rate": 2.1344277792060275e-08, | |
| "loss": 0.3819, | |
| "step": 12170 | |
| }, | |
| { | |
| "epoch": 1.9487999999999999, | |
| "grad_norm": 1.4481157064437866, | |
| "learning_rate": 2.0074925650854226e-08, | |
| "loss": 0.3944, | |
| "step": 12180 | |
| }, | |
| { | |
| "epoch": 1.9504000000000001, | |
| "grad_norm": 1.3459042310714722, | |
| "learning_rate": 1.8844407965740165e-08, | |
| "loss": 0.3706, | |
| "step": 12190 | |
| }, | |
| { | |
| "epoch": 1.952, | |
| "grad_norm": 1.517040729522705, | |
| "learning_rate": 1.765273433254111e-08, | |
| "loss": 0.3609, | |
| "step": 12200 | |
| }, | |
| { | |
| "epoch": 1.9536, | |
| "grad_norm": 1.5712956190109253, | |
| "learning_rate": 1.6499914044168508e-08, | |
| "loss": 0.3072, | |
| "step": 12210 | |
| }, | |
| { | |
| "epoch": 1.9552, | |
| "grad_norm": 1.773306965827942, | |
| "learning_rate": 1.538595609054616e-08, | |
| "loss": 0.392, | |
| "step": 12220 | |
| }, | |
| { | |
| "epoch": 1.9567999999999999, | |
| "grad_norm": 2.0914337635040283, | |
| "learning_rate": 1.4310869158541408e-08, | |
| "loss": 0.4145, | |
| "step": 12230 | |
| }, | |
| { | |
| "epoch": 1.9584000000000001, | |
| "grad_norm": 1.853850245475769, | |
| "learning_rate": 1.3274661631899055e-08, | |
| "loss": 0.4023, | |
| "step": 12240 | |
| }, | |
| { | |
| "epoch": 1.96, | |
| "grad_norm": 1.7381967306137085, | |
| "learning_rate": 1.2277341591172553e-08, | |
| "loss": 0.3769, | |
| "step": 12250 | |
| }, | |
| { | |
| "epoch": 1.9616, | |
| "grad_norm": 1.5656715631484985, | |
| "learning_rate": 1.1318916813664594e-08, | |
| "loss": 0.3724, | |
| "step": 12260 | |
| }, | |
| { | |
| "epoch": 1.9632, | |
| "grad_norm": 1.7236179113388062, | |
| "learning_rate": 1.039939477336438e-08, | |
| "loss": 0.3583, | |
| "step": 12270 | |
| }, | |
| { | |
| "epoch": 1.9647999999999999, | |
| "grad_norm": 1.6509188413619995, | |
| "learning_rate": 9.518782640888235e-09, | |
| "loss": 0.357, | |
| "step": 12280 | |
| }, | |
| { | |
| "epoch": 1.9664000000000001, | |
| "grad_norm": 1.696998953819275, | |
| "learning_rate": 8.677087283427976e-09, | |
| "loss": 0.368, | |
| "step": 12290 | |
| }, | |
| { | |
| "epoch": 1.968, | |
| "grad_norm": 1.674368143081665, | |
| "learning_rate": 7.874315264692622e-09, | |
| "loss": 0.3885, | |
| "step": 12300 | |
| }, | |
| { | |
| "epoch": 1.9696, | |
| "grad_norm": 1.5840187072753906, | |
| "learning_rate": 7.11047284485844e-09, | |
| "loss": 0.3336, | |
| "step": 12310 | |
| }, | |
| { | |
| "epoch": 1.9712, | |
| "grad_norm": 1.4688122272491455, | |
| "learning_rate": 6.385565980523978e-09, | |
| "loss": 0.369, | |
| "step": 12320 | |
| }, | |
| { | |
| "epoch": 1.9727999999999999, | |
| "grad_norm": 1.461673378944397, | |
| "learning_rate": 5.699600324657328e-09, | |
| "loss": 0.4017, | |
| "step": 12330 | |
| }, | |
| { | |
| "epoch": 1.9744000000000002, | |
| "grad_norm": 1.8351613283157349, | |
| "learning_rate": 5.052581226556719e-09, | |
| "loss": 0.4466, | |
| "step": 12340 | |
| }, | |
| { | |
| "epoch": 1.976, | |
| "grad_norm": 1.826789140701294, | |
| "learning_rate": 4.4445137318072096e-09, | |
| "loss": 0.4012, | |
| "step": 12350 | |
| }, | |
| { | |
| "epoch": 1.9776, | |
| "grad_norm": 1.3256640434265137, | |
| "learning_rate": 3.8754025822407285e-09, | |
| "loss": 0.3696, | |
| "step": 12360 | |
| }, | |
| { | |
| "epoch": 1.9792, | |
| "grad_norm": 1.5135594606399536, | |
| "learning_rate": 3.3452522159010957e-09, | |
| "loss": 0.3607, | |
| "step": 12370 | |
| }, | |
| { | |
| "epoch": 1.9808, | |
| "grad_norm": 1.3603088855743408, | |
| "learning_rate": 2.8540667670073905e-09, | |
| "loss": 0.3569, | |
| "step": 12380 | |
| }, | |
| { | |
| "epoch": 1.9824000000000002, | |
| "grad_norm": 1.8701496124267578, | |
| "learning_rate": 2.4018500659217515e-09, | |
| "loss": 0.397, | |
| "step": 12390 | |
| }, | |
| { | |
| "epoch": 1.984, | |
| "grad_norm": 1.5545977354049683, | |
| "learning_rate": 1.9886056391210663e-09, | |
| "loss": 0.3918, | |
| "step": 12400 | |
| }, | |
| { | |
| "epoch": 1.9856, | |
| "grad_norm": 1.655199408531189, | |
| "learning_rate": 1.6143367091686624e-09, | |
| "loss": 0.3268, | |
| "step": 12410 | |
| }, | |
| { | |
| "epoch": 1.9872, | |
| "grad_norm": 1.6822417974472046, | |
| "learning_rate": 1.2790461946887712e-09, | |
| "loss": 0.38, | |
| "step": 12420 | |
| }, | |
| { | |
| "epoch": 1.9888, | |
| "grad_norm": 1.9061747789382935, | |
| "learning_rate": 9.827367103437679e-10, | |
| "loss": 0.3891, | |
| "step": 12430 | |
| }, | |
| { | |
| "epoch": 1.9904, | |
| "grad_norm": 1.6047697067260742, | |
| "learning_rate": 7.254105668152988e-10, | |
| "loss": 0.3385, | |
| "step": 12440 | |
| }, | |
| { | |
| "epoch": 1.992, | |
| "grad_norm": 1.7033920288085938, | |
| "learning_rate": 5.070697707837413e-10, | |
| "loss": 0.3682, | |
| "step": 12450 | |
| }, | |
| { | |
| "epoch": 1.9936, | |
| "grad_norm": 1.8596231937408447, | |
| "learning_rate": 3.277160249143263e-10, | |
| "loss": 0.3599, | |
| "step": 12460 | |
| }, | |
| { | |
| "epoch": 1.9952, | |
| "grad_norm": 1.3612921237945557, | |
| "learning_rate": 1.873507278438158e-10, | |
| "loss": 0.3236, | |
| "step": 12470 | |
| }, | |
| { | |
| "epoch": 1.9968, | |
| "grad_norm": 1.2464444637298584, | |
| "learning_rate": 8.597497416940048e-11, | |
| "loss": 0.3558, | |
| "step": 12480 | |
| }, | |
| { | |
| "epoch": 1.9984, | |
| "grad_norm": 1.5906494855880737, | |
| "learning_rate": 2.3589554439262807e-11, | |
| "loss": 0.3781, | |
| "step": 12490 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 1.6512545347213745, | |
| "learning_rate": 1.9495514758105516e-13, | |
| "loss": 0.3597, | |
| "step": 12500 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "step": 12500, | |
| "total_flos": 2.5770017838711767e+18, | |
| "train_loss": 0.4743082572174072, | |
| "train_runtime": 87577.7347, | |
| "train_samples_per_second": 1.142, | |
| "train_steps_per_second": 0.143 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 12500, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 5000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.5770017838711767e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
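
The record above is a Hugging Face `Trainer` state dump (the file `Trainer` saves as `trainer_state.json`): `log_history` holds one entry per `logging_steps` interval (here, every 10 steps) plus a final summary record. A minimal sketch for inspecting it offline, assuming the file is saved as `trainer_state.json` in the working directory and that matplotlib is installed (neither is guaranteed by the log itself; any JSON-capable tooling works):

```python
import json

import matplotlib.pyplot as plt

# Load the dumped trainer state (the path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep the per-interval logging entries; the final summary record
# ("train_loss", "train_runtime", ...) carries no "loss" key.
logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

# Plot the training loss and the learning-rate schedule against the
# global step on a shared x-axis.
fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()
```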