{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 939,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 21.8271541595459,
      "learning_rate": 6.896551724137931e-07,
      "loss": 2.2189,
      "step": 1
    },
    {
      "epoch": 0.0,
      "grad_norm": 21.317724227905273,
      "learning_rate": 1.3793103448275862e-06,
      "loss": 2.213,
      "step": 2
    },
    {
      "epoch": 0.0,
      "grad_norm": 24.682788848876953,
      "learning_rate": 2.0689655172413796e-06,
      "loss": 2.4127,
      "step": 3
    },
    {
      "epoch": 0.0,
      "grad_norm": 20.469785690307617,
      "learning_rate": 2.7586206896551725e-06,
      "loss": 2.2103,
      "step": 4
    },
    {
      "epoch": 0.01,
      "grad_norm": 16.273277282714844,
      "learning_rate": 3.448275862068966e-06,
      "loss": 2.1917,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 20.366430282592773,
      "learning_rate": 4.137931034482759e-06,
      "loss": 2.2657,
      "step": 6
    },
    {
      "epoch": 0.01,
      "grad_norm": 12.321362495422363,
      "learning_rate": 4.8275862068965525e-06,
      "loss": 2.0721,
      "step": 7
    },
    {
      "epoch": 0.01,
      "grad_norm": 10.076711654663086,
      "learning_rate": 5.517241379310345e-06,
      "loss": 2.0023,
      "step": 8
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.338404178619385,
      "learning_rate": 6.206896551724138e-06,
      "loss": 1.8252,
      "step": 9
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.978753566741943,
      "learning_rate": 6.896551724137932e-06,
      "loss": 1.7743,
      "step": 10
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.377879619598389,
      "learning_rate": 7.586206896551724e-06,
      "loss": 1.8868,
      "step": 11
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.3909735679626465,
      "learning_rate": 8.275862068965518e-06,
      "loss": 1.6738,
      "step": 12
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.7440361976623535,
      "learning_rate": 8.965517241379312e-06,
      "loss": 1.8213,
      "step": 13
    },
    {
      "epoch": 0.01,
      "grad_norm": 4.620378017425537,
      "learning_rate": 9.655172413793105e-06,
      "loss": 1.6853,
      "step": 14
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.989961862564087,
      "learning_rate": 1.0344827586206898e-05,
      "loss": 1.7233,
      "step": 15
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.9967751502990723,
      "learning_rate": 1.103448275862069e-05,
      "loss": 1.6611,
      "step": 16
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.2126998901367188,
      "learning_rate": 1.1724137931034483e-05,
      "loss": 1.5849,
      "step": 17
    },
    {
      "epoch": 0.02,
      "grad_norm": 4.3291015625,
      "learning_rate": 1.2413793103448277e-05,
      "loss": 1.6643,
      "step": 18
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.767850637435913,
      "learning_rate": 1.310344827586207e-05,
      "loss": 1.6024,
      "step": 19
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.049321174621582,
      "learning_rate": 1.3793103448275863e-05,
      "loss": 1.5756,
      "step": 20
    },
    {
      "epoch": 0.02,
      "grad_norm": 3.590315103530884,
      "learning_rate": 1.4482758620689657e-05,
      "loss": 1.6506,
      "step": 21
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.2997117042541504,
      "learning_rate": 1.5172413793103448e-05,
      "loss": 1.5368,
      "step": 22
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.2902848720550537,
      "learning_rate": 1.586206896551724e-05,
      "loss": 1.5619,
      "step": 23
    },
    {
      "epoch": 0.03,
      "grad_norm": 3.0407345294952393,
      "learning_rate": 1.6551724137931037e-05,
      "loss": 1.5331,
      "step": 24
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.0579607486724854,
      "learning_rate": 1.7241379310344828e-05,
      "loss": 1.5092,
      "step": 25
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.0638997554779053,
      "learning_rate": 1.7931034482758623e-05,
      "loss": 1.5125,
      "step": 26
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.123168706893921,
      "learning_rate": 1.8620689655172415e-05,
      "loss": 1.5159,
      "step": 27
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.7672958374023438,
      "learning_rate": 1.931034482758621e-05,
      "loss": 1.5182,
      "step": 28
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.8967580795288086,
      "learning_rate": 2e-05,
      "loss": 1.4779,
      "step": 29
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.8136967420578003,
      "learning_rate": 1.9999940408195878e-05,
      "loss": 1.488,
      "step": 30
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.173333168029785,
      "learning_rate": 1.9999761633493754e-05,
      "loss": 1.4982,
      "step": 31
    },
    {
      "epoch": 0.03,
      "grad_norm": 2.286966323852539,
      "learning_rate": 1.9999463678024317e-05,
      "loss": 1.4557,
      "step": 32
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.681356430053711,
      "learning_rate": 1.999904654533872e-05,
      "loss": 1.4516,
      "step": 33
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.1232216358184814,
      "learning_rate": 1.9998510240408495e-05,
      "loss": 1.4285,
      "step": 34
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.7466108798980713,
      "learning_rate": 1.999785476962552e-05,
      "loss": 1.431,
      "step": 35
    },
    {
      "epoch": 0.04,
      "grad_norm": 2.2526700496673584,
      "learning_rate": 1.9997080140801932e-05,
      "loss": 1.3769,
      "step": 36
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.909569263458252,
      "learning_rate": 1.9996186363170037e-05,
      "loss": 1.4519,
      "step": 37
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.6817132234573364,
      "learning_rate": 1.9995173447382193e-05,
      "loss": 1.4075,
      "step": 38
    },
    {
      "epoch": 0.04,
      "grad_norm": 3.831782102584839,
      "learning_rate": 1.9994041405510705e-05,
      "loss": 1.3883,
      "step": 39
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.8575108051300049,
      "learning_rate": 1.9992790251047655e-05,
      "loss": 1.417,
      "step": 40
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.7353404760360718,
      "learning_rate": 1.999141999890475e-05,
      "loss": 1.413,
      "step": 41
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.9283984899520874,
      "learning_rate": 1.9989930665413148e-05,
      "loss": 1.4015,
      "step": 42
    },
    {
      "epoch": 0.05,
      "grad_norm": 3.5153067111968994,
      "learning_rate": 1.998832226832327e-05,
      "loss": 1.3681,
      "step": 43
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.6726486682891846,
      "learning_rate": 1.9986594826804563e-05,
      "loss": 1.38,
      "step": 44
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.8287858963012695,
      "learning_rate": 1.9984748361445306e-05,
      "loss": 1.3932,
      "step": 45
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.9177086353302002,
      "learning_rate": 1.998278289425234e-05,
      "loss": 1.3887,
      "step": 46
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.1517434120178223,
      "learning_rate": 1.9980698448650805e-05,
      "loss": 1.3311,
      "step": 47
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.5224231481552124,
      "learning_rate": 1.9978495049483883e-05,
      "loss": 1.3961,
      "step": 48
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.500178575515747,
      "learning_rate": 1.997617272301248e-05,
      "loss": 1.3498,
      "step": 49
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.6448391675949097,
      "learning_rate": 1.9973731496914914e-05,
      "loss": 1.3436,
      "step": 50
    },
    {
      "epoch": 0.05,
      "grad_norm": 2.1867380142211914,
      "learning_rate": 1.9971171400286602e-05,
      "loss": 1.3163,
      "step": 51
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.094888925552368,
      "learning_rate": 1.9968492463639704e-05,
      "loss": 1.3607,
      "step": 52
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.287102460861206,
      "learning_rate": 1.9965694718902745e-05,
      "loss": 1.3408,
      "step": 53
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.418356418609619,
      "learning_rate": 1.9962778199420265e-05,
      "loss": 1.3595,
      "step": 54
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.028092861175537,
      "learning_rate": 1.9959742939952393e-05,
      "loss": 1.3253,
      "step": 55
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.9939210414886475,
      "learning_rate": 1.9956588976674442e-05,
      "loss": 1.3424,
      "step": 56
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.739817142486572,
      "learning_rate": 1.995331634717649e-05,
      "loss": 1.3361,
      "step": 57
    },
    {
      "epoch": 0.06,
      "grad_norm": 2.713473081588745,
      "learning_rate": 1.994992509046291e-05,
      "loss": 1.2617,
      "step": 58
    },
    {
      "epoch": 0.06,
      "grad_norm": 5.3400654792785645,
      "learning_rate": 1.9946415246951928e-05,
      "loss": 1.3687,
      "step": 59
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.8738880157470703,
      "learning_rate": 1.9942786858475126e-05,
      "loss": 1.345,
      "step": 60
    },
    {
      "epoch": 0.06,
      "grad_norm": 3.6597769260406494,
      "learning_rate": 1.9939039968276942e-05,
      "loss": 1.325,
      "step": 61
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.741032838821411,
      "learning_rate": 1.9935174621014173e-05,
      "loss": 1.3066,
      "step": 62
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.1187946796417236,
      "learning_rate": 1.9931190862755416e-05,
      "loss": 1.3398,
      "step": 63
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.129025459289551,
      "learning_rate": 1.992708874098054e-05,
      "loss": 1.3603,
      "step": 64
    },
    {
      "epoch": 0.07,
      "grad_norm": 4.482194900512695,
      "learning_rate": 1.992286830458012e-05,
      "loss": 1.2851,
      "step": 65
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.7002497911453247,
      "learning_rate": 1.9918529603854825e-05,
      "loss": 1.3249,
      "step": 66
    },
    {
      "epoch": 0.07,
      "grad_norm": 3.0181782245635986,
      "learning_rate": 1.991407269051487e-05,
      "loss": 1.307,
      "step": 67
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.2545225620269775,
      "learning_rate": 1.990949761767935e-05,
      "loss": 1.3333,
      "step": 68
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.612231492996216,
      "learning_rate": 1.9904804439875635e-05,
      "loss": 1.29,
      "step": 69
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.2334043979644775,
      "learning_rate": 1.989999321303871e-05,
      "loss": 1.3262,
      "step": 70
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.777111768722534,
      "learning_rate": 1.9895063994510512e-05,
      "loss": 1.2641,
      "step": 71
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.7548855543136597,
      "learning_rate": 1.989001684303925e-05,
      "loss": 1.3394,
      "step": 72
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.8646557331085205,
      "learning_rate": 1.9884851818778695e-05,
      "loss": 1.3197,
      "step": 73
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.4656805992126465,
      "learning_rate": 1.9879568983287468e-05,
      "loss": 1.289,
      "step": 74
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.8441545963287354,
      "learning_rate": 1.9874168399528307e-05,
      "loss": 1.2713,
      "step": 75
    },
    {
      "epoch": 0.08,
      "grad_norm": 2.9740052223205566,
      "learning_rate": 1.986865013186732e-05,
      "loss": 1.2749,
      "step": 76
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.2040226459503174,
      "learning_rate": 1.9863014246073216e-05,
      "loss": 1.3199,
      "step": 77
    },
    {
      "epoch": 0.08,
      "grad_norm": 3.2927985191345215,
      "learning_rate": 1.985726080931651e-05,
      "loss": 1.2894,
      "step": 78
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.6203821897506714,
      "learning_rate": 1.9851389890168738e-05,
      "loss": 1.2966,
      "step": 79
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.5133774280548096,
      "learning_rate": 1.9845401558601634e-05,
      "loss": 1.2563,
      "step": 80
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.6008837223052979,
      "learning_rate": 1.98392958859863e-05,
      "loss": 1.2594,
      "step": 81
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.3777157068252563,
      "learning_rate": 1.9833072945092334e-05,
      "loss": 1.2984,
      "step": 82
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.578803300857544,
      "learning_rate": 1.9826732810087e-05,
      "loss": 1.2661,
      "step": 83
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.549088716506958,
      "learning_rate": 1.9820275556534306e-05,
      "loss": 1.2546,
      "step": 84
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.3274394273757935,
      "learning_rate": 1.9813701261394136e-05,
      "loss": 1.2699,
      "step": 85
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.032712936401367,
      "learning_rate": 1.980701000302131e-05,
      "loss": 1.2846,
      "step": 86
    },
    {
      "epoch": 0.09,
      "grad_norm": 3.147608995437622,
      "learning_rate": 1.9800201861164665e-05,
      "loss": 1.2084,
      "step": 87
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.5028866529464722,
      "learning_rate": 1.979327691696608e-05,
      "loss": 1.3017,
      "step": 88
    },
    {
      "epoch": 0.09,
      "grad_norm": 2.2231180667877197,
      "learning_rate": 1.9786235252959555e-05,
      "loss": 1.2901,
      "step": 89
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.0596678256988525,
      "learning_rate": 1.977907695307017e-05,
      "loss": 1.1663,
      "step": 90
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.9483755826950073,
      "learning_rate": 1.9771802102613127e-05,
      "loss": 1.2239,
      "step": 91
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.2688753604888916,
      "learning_rate": 1.9764410788292724e-05,
      "loss": 1.3146,
      "step": 92
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.2566375732421875,
      "learning_rate": 1.975690309820131e-05,
      "loss": 1.2921,
      "step": 93
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.80307936668396,
      "learning_rate": 1.9749279121818235e-05,
      "loss": 1.1843,
      "step": 94
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.7207767963409424,
      "learning_rate": 1.9741538950008817e-05,
      "loss": 1.3001,
      "step": 95
    },
    {
      "epoch": 0.1,
      "grad_norm": 3.1365432739257812,
      "learning_rate": 1.9733682675023207e-05,
      "loss": 1.2889,
      "step": 96
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.914738178253174,
      "learning_rate": 1.972571039049533e-05,
      "loss": 1.2628,
      "step": 97
    },
    {
      "epoch": 0.1,
      "grad_norm": 2.3677148818969727,
      "learning_rate": 1.971762219144174e-05,
      "loss": 1.1992,
      "step": 98
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.707825183868408,
      "learning_rate": 1.9709418174260523e-05,
      "loss": 1.2696,
      "step": 99
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.2368316650390625,
      "learning_rate": 1.9701098436730108e-05,
      "loss": 1.3151,
      "step": 100
    },
    {
      "epoch": 0.11,
      "grad_norm": 4.441848278045654,
      "learning_rate": 1.969266307800813e-05,
      "loss": 1.2748,
      "step": 101
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.297125816345215,
      "learning_rate": 1.9684112198630246e-05,
      "loss": 1.161,
      "step": 102
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.532125473022461,
      "learning_rate": 1.967544590050891e-05,
      "loss": 1.279,
      "step": 103
    },
    {
      "epoch": 0.11,
      "grad_norm": 3.045705795288086,
      "learning_rate": 1.9666664286932198e-05,
      "loss": 1.2325,
      "step": 104
    },
    {
      "epoch": 0.11,
      "grad_norm": 2.2214536666870117,
      "learning_rate": 1.9657767462562544e-05,
      "loss": 1.1835,
      "step": 105
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.4017865657806396,
      "learning_rate": 1.9648755533435517e-05,
      "loss": 1.2709,
      "step": 106
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.5362350940704346,
      "learning_rate": 1.9639628606958535e-05,
      "loss": 1.2936,
      "step": 107
    },
    {
      "epoch": 0.12,
      "grad_norm": 3.004079818725586,
      "learning_rate": 1.96303867919096e-05,
      "loss": 1.1838,
      "step": 108
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.6795995235443115,
      "learning_rate": 1.9621030198436007e-05,
      "loss": 1.1938,
      "step": 109
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.5308665037155151,
      "learning_rate": 1.9611558938053003e-05,
      "loss": 1.2678,
      "step": 110
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.875738501548767,
      "learning_rate": 1.9601973123642493e-05,
      "loss": 1.2761,
      "step": 111
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.533500909805298,
      "learning_rate": 1.9592272869451672e-05,
      "loss": 1.1581,
      "step": 112
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.3702383041381836,
      "learning_rate": 1.9582458291091664e-05,
      "loss": 1.283,
      "step": 113
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8049883842468262,
      "learning_rate": 1.957252950553616e-05,
      "loss": 1.2012,
      "step": 114
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.5067845582962036,
      "learning_rate": 1.9562486631120007e-05,
      "loss": 1.2585,
      "step": 115
    },
    {
      "epoch": 0.12,
      "grad_norm": 2.059365749359131,
      "learning_rate": 1.9552329787537805e-05,
      "loss": 1.2147,
      "step": 116
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.8772664070129395,
      "learning_rate": 1.9542059095842484e-05,
      "loss": 1.2126,
      "step": 117
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.5692819356918335,
      "learning_rate": 1.9531674678443853e-05,
      "loss": 1.1856,
      "step": 118
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.549210548400879,
      "learning_rate": 1.952117665910714e-05,
      "loss": 1.2705,
      "step": 119
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.3374148607254028,
      "learning_rate": 1.9510565162951538e-05,
      "loss": 1.2165,
      "step": 120
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.695432186126709,
      "learning_rate": 1.9499840316448675e-05,
      "loss": 1.208,
      "step": 121
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.3880375623703003,
      "learning_rate": 1.948900224742115e-05,
      "loss": 1.2452,
      "step": 122
    },
    {
      "epoch": 0.13,
      "grad_norm": 2.0581412315368652,
      "learning_rate": 1.9478051085040978e-05,
      "loss": 1.1639,
      "step": 123
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.481013536453247,
      "learning_rate": 1.9466986959828063e-05,
      "loss": 1.1937,
      "step": 124
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.4966742992401123,
      "learning_rate": 1.945581000364864e-05,
      "loss": 1.208,
      "step": 125
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.448712944984436,
      "learning_rate": 1.9444520349713705e-05,
      "loss": 1.2616,
      "step": 126
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6555765867233276,
      "learning_rate": 1.9433118132577432e-05,
      "loss": 1.1929,
      "step": 127
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.4194772243499756,
      "learning_rate": 1.942160348813556e-05,
      "loss": 1.2034,
      "step": 128
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.381339430809021,
      "learning_rate": 1.9409976553623767e-05,
      "loss": 1.2593,
      "step": 129
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.3500454425811768,
      "learning_rate": 1.9398237467616063e-05,
      "loss": 1.182,
      "step": 130
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6402866840362549,
      "learning_rate": 1.9386386370023104e-05,
      "loss": 1.2118,
      "step": 131
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.568561315536499,
      "learning_rate": 1.9374423402090553e-05,
      "loss": 1.1913,
      "step": 132
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.2762295007705688,
      "learning_rate": 1.9362348706397374e-05,
      "loss": 1.2218,
      "step": 133
    },
    {
      "epoch": 0.14,
      "grad_norm": 2.5442495346069336,
      "learning_rate": 1.9350162426854152e-05,
      "loss": 1.1526,
      "step": 134
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.2681175470352173,
      "learning_rate": 1.933786470870136e-05,
      "loss": 1.2303,
      "step": 135
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.5894718170166016,
      "learning_rate": 1.9325455698507638e-05,
      "loss": 1.2514,
      "step": 136
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.1486873626708984,
      "learning_rate": 1.931293554416805e-05,
      "loss": 1.1334,
      "step": 137
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.4406920671463013,
      "learning_rate": 1.9300304394902315e-05,
      "loss": 1.1837,
      "step": 138
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.2673237323760986,
      "learning_rate": 1.9287562401253023e-05,
      "loss": 1.2832,
      "step": 139
    },
    {
      "epoch": 0.15,
      "grad_norm": 3.6456334590911865,
      "learning_rate": 1.927470971508386e-05,
      "loss": 1.186,
      "step": 140
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.6070517301559448,
      "learning_rate": 1.9261746489577767e-05,
      "loss": 1.1134,
      "step": 141
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.6685227155685425,
      "learning_rate": 1.924867287923515e-05,
      "loss": 1.2377,
      "step": 142
    },
    {
      "epoch": 0.15,
      "grad_norm": 2.0820910930633545,
      "learning_rate": 1.923548903987201e-05,
      "loss": 1.2474,
      "step": 143
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.6539766788482666,
      "learning_rate": 1.9222195128618108e-05,
      "loss": 1.1783,
      "step": 144
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.6540615558624268,
      "learning_rate": 1.9208791303915063e-05,
      "loss": 1.107,
      "step": 145
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.5331521034240723,
      "learning_rate": 1.919527772551451e-05,
      "loss": 1.2159,
      "step": 146
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.7439680099487305,
      "learning_rate": 1.918165455447614e-05,
      "loss": 1.2307,
      "step": 147
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.9292418956756592,
      "learning_rate": 1.9167921953165827e-05,
      "loss": 1.1922,
      "step": 148
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.8799481391906738,
      "learning_rate": 1.9154080085253665e-05,
      "loss": 1.0591,
      "step": 149
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.481303334236145,
      "learning_rate": 1.9140129115712035e-05,
      "loss": 1.2249,
      "step": 150
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.6806588172912598,
      "learning_rate": 1.912606921081362e-05,
      "loss": 1.236,
      "step": 151
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.8849890232086182,
      "learning_rate": 1.9111900538129443e-05,
      "loss": 1.153,
      "step": 152
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.3254696130752563,
      "learning_rate": 1.909762326652686e-05,
      "loss": 1.2062,
      "step": 153
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.4039585590362549,
      "learning_rate": 1.908323756616754e-05,
      "loss": 1.2113,
      "step": 154
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.4949158430099487,
      "learning_rate": 1.9068743608505454e-05,
      "loss": 1.1661,
      "step": 155
    },
    {
      "epoch": 0.17,
      "grad_norm": 2.0551376342773438,
      "learning_rate": 1.9054141566284822e-05,
      "loss": 1.1227,
      "step": 156
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.2707809209823608,
      "learning_rate": 1.9039431613538047e-05,
      "loss": 1.217,
      "step": 157
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.4150937795639038,
      "learning_rate": 1.9024613925583652e-05,
      "loss": 1.2083,
      "step": 158
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.52974271774292,
      "learning_rate": 1.900968867902419e-05,
      "loss": 1.1602,
      "step": 159
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.6180052757263184,
      "learning_rate": 1.899465605174414e-05,
      "loss": 1.1741,
      "step": 160
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.344632625579834,
      "learning_rate": 1.8979516222907776e-05,
      "loss": 1.17,
      "step": 161
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.3710107803344727,
      "learning_rate": 1.896426937295704e-05,
      "loss": 1.2173,
      "step": 162
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.8059217929840088,
      "learning_rate": 1.8948915683609387e-05,
      "loss": 1.1522,
      "step": 163
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.8498823642730713,
      "learning_rate": 1.8933455337855633e-05,
      "loss": 1.0953,
      "step": 164
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.4292060136795044,
      "learning_rate": 1.8917888519957756e-05,
      "loss": 1.1505,
      "step": 165
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.4529762268066406,
      "learning_rate": 1.89022154154467e-05,
      "loss": 1.218,
      "step": 166
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5634841918945312,
      "learning_rate": 1.8886436211120195e-05,
      "loss": 1.2195,
      "step": 167
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.3348324298858643,
      "learning_rate": 1.8870551095040476e-05,
      "loss": 1.1384,
      "step": 168
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.3607831001281738,
      "learning_rate": 1.8854560256532098e-05,
      "loss": 1.2035,
      "step": 169
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.3515021800994873,
      "learning_rate": 1.8838463886179647e-05,
      "loss": 1.1397,
      "step": 170
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.9323036670684814,
      "learning_rate": 1.8822262175825463e-05,
      "loss": 1.1635,
      "step": 171
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.2316958904266357,
      "learning_rate": 1.880595531856738e-05,
      "loss": 1.2009,
      "step": 172
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5740736722946167,
      "learning_rate": 1.878954350875641e-05,
      "loss": 1.1694,
      "step": 173
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.584872841835022,
      "learning_rate": 1.877302694199442e-05,
      "loss": 1.1544,
      "step": 174
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.4253188371658325,
      "learning_rate": 1.8756405815131815e-05,
      "loss": 1.1496,
      "step": 175
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.2769767045974731,
      "learning_rate": 1.873968032626518e-05,
      "loss": 1.199,
      "step": 176
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.4851347208023071,
      "learning_rate": 1.872285067473493e-05,
      "loss": 1.1609,
      "step": 177
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.4933650493621826,
      "learning_rate": 1.8705917061122917e-05,
      "loss": 1.1516,
      "step": 178
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.471543788909912,
      "learning_rate": 1.8688879687250067e-05,
      "loss": 1.1409,
      "step": 179
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.1612536907196045,
      "learning_rate": 1.8671738756173946e-05,
      "loss": 1.1791,
      "step": 180
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.6514532566070557,
      "learning_rate": 1.8654494472186352e-05,
      "loss": 1.0852,
      "step": 181
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.3452060222625732,
      "learning_rate": 1.8637147040810884e-05,
      "loss": 1.193,
      "step": 182
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.2717292308807373,
      "learning_rate": 1.8619696668800494e-05,
      "loss": 1.1856,
      "step": 183
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2781697511672974,
      "learning_rate": 1.860214356413501e-05,
      "loss": 1.1319,
      "step": 184
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.4733614921569824,
      "learning_rate": 1.8584487936018663e-05,
      "loss": 1.1452,
      "step": 185
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2057603597640991,
      "learning_rate": 1.8566729994877604e-05,
      "loss": 1.1916,
      "step": 186
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.3118027448654175,
      "learning_rate": 1.854886995235738e-05,
      "loss": 1.1496,
      "step": 187
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.5732028484344482,
      "learning_rate": 1.8530908021320427e-05,
      "loss": 1.0625,
      "step": 188
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.2113425731658936,
      "learning_rate": 1.8512844415843514e-05,
      "loss": 1.2074,
      "step": 189
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.3583440780639648,
      "learning_rate": 1.8494679351215212e-05,
      "loss": 1.1985,
      "step": 190
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.3396681547164917,
      "learning_rate": 1.8476413043933316e-05,
      "loss": 1.1486,
      "step": 191
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.574687123298645,
      "learning_rate": 1.8458045711702264e-05,
      "loss": 1.0996,
      "step": 192
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.185196042060852,
      "learning_rate": 1.8439577573430557e-05,
      "loss": 1.185,
      "step": 193
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.3120192289352417,
      "learning_rate": 1.842100884922812e-05,
      "loss": 1.1842,
      "step": 194
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.2736554145812988,
      "learning_rate": 1.8402339760403715e-05,
      "loss": 1.1952,
      "step": 195
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.411303997039795,
      "learning_rate": 1.8383570529462273e-05,
      "loss": 1.0729,
      "step": 196
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.976739525794983,
      "learning_rate": 1.8364701380102267e-05,
      "loss": 1.0576,
      "step": 197
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.171024203300476,
      "learning_rate": 1.834573253721303e-05,
      "loss": 1.213,
      "step": 198
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.4460878372192383,
      "learning_rate": 1.8326664226872063e-05,
      "loss": 1.1902,
      "step": 199
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.818941354751587,
      "learning_rate": 1.8307496676342384e-05,
      "loss": 1.0703,
      "step": 200
    },
    {
      "epoch": 0.21,
      "grad_norm": 1.3394752740859985,
      "learning_rate": 1.828823011406977e-05,
      "loss": 1.2043,
      "step": 201
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.5700968503952026,
      "learning_rate": 1.8268864769680054e-05,
      "loss": 1.2027,
      "step": 202
    },
    {
      "epoch": 0.22,
      "grad_norm": 2.0403037071228027,
      "learning_rate": 1.824940087397641e-05,
      "loss": 1.0066,
      "step": 203
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.2497912645339966,
      "learning_rate": 1.8229838658936566e-05,
      "loss": 1.2221,
      "step": 204
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.8370343446731567,
      "learning_rate": 1.8210178357710057e-05,
      "loss": 1.1774,
      "step": 205
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.626924991607666,
      "learning_rate": 1.819042020461545e-05,
      "loss": 1.1431,
      "step": 206
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.6762784719467163,
      "learning_rate": 1.8170564435137542e-05,
      "loss": 1.1272,
      "step": 207
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.6374855041503906,
      "learning_rate": 1.8150611285924556e-05,
      "loss": 1.1316,
      "step": 208
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.4332759380340576,
      "learning_rate": 1.8130560994785325e-05,
      "loss": 1.1963,
      "step": 209
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.3836238384246826,
      "learning_rate": 1.8110413800686456e-05,
      "loss": 1.1922,
      "step": 210
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.6723711490631104,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 1.0374,
      "step": 211
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.4377235174179077,
      "learning_rate": 1.8069829665247975e-05,
      "loss": 1.0966,
      "step": 212
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.2402416467666626,
      "learning_rate": 1.8049393207604734e-05,
      "loss": 1.2414,
      "step": 213
    },
    {
      "epoch": 0.23,
      "grad_norm": 2.789266586303711,
      "learning_rate": 1.8028860814388826e-05,
      "loss": 1.0918,
      "step": 214
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.4653987884521484,
      "learning_rate": 1.8008232730312724e-05,
      "loss": 1.0351,
      "step": 215
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.840067982673645,
      "learning_rate": 1.7987509201229378e-05,
      "loss": 1.1962,
      "step": 216
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.6036863327026367,
      "learning_rate": 1.7966690474129285e-05,
      "loss": 1.1977,
      "step": 217
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.8035465478897095,
      "learning_rate": 1.7945776797137544e-05,
      "loss": 1.0516,
      "step": 218
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.3571094274520874,
      "learning_rate": 1.7924768419510906e-05,
      "loss": 1.1762,
      "step": 219
    },
    {
      "epoch": 0.23,
      "grad_norm": 3.8855533599853516,
      "learning_rate": 1.7903665591634794e-05,
      "loss": 1.1755,
      "step": 220
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.5543122291564941,
      "learning_rate": 1.7882468565020327e-05,
      "loss": 1.1033,
      "step": 221
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.7055552005767822,
      "learning_rate": 1.786117759230132e-05,
      "loss": 1.1154,
      "step": 222
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.7987192869186401,
      "learning_rate": 1.7839792927231253e-05,
      "loss": 1.1642,
      "step": 223
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.426632285118103,
      "learning_rate": 1.78183148246803e-05,
      "loss": 1.12,
      "step": 224
    },
    {
      "epoch": 0.24,
      "grad_norm": 2.141639471054077,
      "learning_rate": 1.7796743540632226e-05,
      "loss": 1.0918,
      "step": 225
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.5163425207138062,
      "learning_rate": 1.777507933218138e-05,
      "loss": 1.1822,
      "step": 226
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.4098217487335205,
      "learning_rate": 1.7753322457529615e-05,
      "loss": 1.1135,
      "step": 227
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.8764716386795044,
      "learning_rate": 1.7731473175983215e-05,
      "loss": 1.1285,
      "step": 228
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.6687111854553223,
      "learning_rate": 1.7709531747949796e-05,
      "loss": 1.1109,
      "step": 229
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.311031699180603,
      "learning_rate": 1.7687498434935224e-05,
      "loss": 1.1821,
      "step": 230
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.7984453439712524,
      "learning_rate": 1.7665373499540464e-05,
      "loss": 1.1187,
      "step": 231
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.4364324808120728,
      "learning_rate": 1.7643157205458483e-05,
      "loss": 1.1631,
      "step": 232
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.6001217365264893,
      "learning_rate": 1.7620849817471094e-05,
      "loss": 1.0721,
      "step": 233
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.3116364479064941,
      "learning_rate": 1.759845160144579e-05,
      "loss": 1.1345,
      "step": 234
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.3075991868972778,
      "learning_rate": 1.7575962824332595e-05,
      "loss": 1.1817,
      "step": 235
    },
    {
      "epoch": 0.25,
      "grad_norm": 2.15160870552063,
      "learning_rate": 1.7553383754160864e-05,
      "loss": 1.0318,
      "step": 236
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.333544373512268,
      "learning_rate": 1.7530714660036112e-05,
      "loss": 1.115,
      "step": 237
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.5261927843093872,
      "learning_rate": 1.7507955812136775e-05,
      "loss": 1.1577,
      "step": 238
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.5996551513671875,
      "learning_rate": 1.7485107481711014e-05,
      "loss": 1.1326,
      "step": 239
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.5833766460418701,
      "learning_rate": 1.7462169941073478e-05,
      "loss": 1.1098,
      "step": 240
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.3292502164840698,
      "learning_rate": 1.7439143463602052e-05,
      "loss": 1.1705,
      "step": 241
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.2800406217575073,
      "learning_rate": 1.74160283237346e-05,
      "loss": 1.166,
      "step": 242
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.5827293395996094,
      "learning_rate": 1.7392824796965703e-05,
      "loss": 1.1105,
      "step": 243
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.0156748294830322,
      "learning_rate": 1.7369533159843368e-05,
      "loss": 1.0359,
      "step": 244
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.2971017360687256,
      "learning_rate": 1.734615368996573e-05,
      "loss": 1.1967,
      "step": 245
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.3633853197097778,
      "learning_rate": 1.7322686665977738e-05,
      "loss": 1.157,
      "step": 246
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.7685860395431519,
      "learning_rate": 1.7299132367567856e-05,
      "loss": 1.049,
      "step": 247
    },
    {
      "epoch": 0.26,
      "grad_norm": 1.6905012130737305,
      "learning_rate": 1.7275491075464716e-05,
      "loss": 1.0922,
      "step": 248
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.3189204931259155,
      "learning_rate": 1.7251763071433767e-05,
      "loss": 1.1474,
      "step": 249
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.5270112752914429,
      "learning_rate": 1.7227948638273918e-05,
      "loss": 1.1165,
      "step": 250
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.830621361732483,
      "learning_rate": 1.7204048059814175e-05,
      "loss": 1.0589,
      "step": 251
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.2493808269500732,
      "learning_rate": 1.7180061620910263e-05,
      "loss": 1.234,
      "step": 252
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.534658670425415,
      "learning_rate": 1.715598960744121e-05,
      "loss": 1.064,
      "step": 253
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.515418529510498,
      "learning_rate": 1.7131832306305964e-05,
      "loss": 1.1001,
      "step": 254
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.3037127256393433,
      "learning_rate": 1.710759000541995e-05,
      "loss": 1.1225,
      "step": 255
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.3684110641479492,
      "learning_rate": 1.7083262993711663e-05,
      "loss": 1.1695,
      "step": 256
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.2716645002365112,
      "learning_rate": 1.7058851561119198e-05,
      "loss": 1.182,
      "step": 257
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.5303761959075928,
      "learning_rate": 1.7034355998586828e-05,
      "loss": 1.0063,
      "step": 258
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.4734691381454468,
      "learning_rate": 1.7009776598061496e-05,
      "loss": 1.095,
      "step": 259
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.2253527641296387,
      "learning_rate": 1.6985113652489374e-05,
      "loss": 1.2601,
      "step": 260
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.5910344123840332,
      "learning_rate": 1.6960367455812336e-05,
      "loss": 1.0323,
      "step": 261
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.402156114578247,
      "learning_rate": 1.6935538302964496e-05,
      "loss": 1.0294,
      "step": 262
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.2129980325698853,
      "learning_rate": 1.691062648986865e-05,
      "loss": 1.1695,
      "step": 263
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.4583830833435059,
      "learning_rate": 1.6885632313432772e-05,
      "loss": 1.1776,
      "step": 264
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.3347725868225098,
      "learning_rate": 1.686055607154648e-05,
      "loss": 1.1044,
      "step": 265
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.5016978979110718,
      "learning_rate": 1.6835398063077476e-05,
      "loss": 1.0461,
      "step": 266
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.1998066902160645,
      "learning_rate": 1.6810158587867973e-05,
      "loss": 1.1683,
      "step": 267
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.536724328994751,
      "learning_rate": 1.6784837946731148e-05,
      "loss": 1.0198,
      "step": 268
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.37685227394104,
      "learning_rate": 1.6759436441447544e-05,
      "loss": 1.0855,
      "step": 269
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.2801436185836792,
      "learning_rate": 1.673395437476146e-05,
      "loss": 1.1562,
      "step": 270
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.278853178024292,
      "learning_rate": 1.6708392050377365e-05,
      "loss": 1.0865,
      "step": 271
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.3824177980422974,
      "learning_rate": 1.668274977295626e-05,
      "loss": 1.078,
      "step": 272
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1306382417678833,
      "learning_rate": 1.6657027848112064e-05,
      "loss": 1.1541,
      "step": 273
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.2830414772033691,
      "learning_rate": 1.6631226582407954e-05,
      "loss": 1.0737,
      "step": 274
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.164262056350708,
      "learning_rate": 1.660534628335273e-05,
      "loss": 1.1609,
      "step": 275
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.1950126886367798,
      "learning_rate": 1.657938725939713e-05,
      "loss": 1.1186,
      "step": 276
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.247334599494934,
      "learning_rate": 1.6553349819930167e-05,
      "loss": 1.1044,
      "step": 277
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.275217890739441,
      "learning_rate": 1.6527234275275445e-05,
      "loss": 1.0827,
      "step": 278
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.1824593544006348,
      "learning_rate": 1.6501040936687444e-05,
      "loss": 1.1472,
      "step": 279
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.2874646186828613,
      "learning_rate": 1.6474770116347824e-05,
      "loss": 1.0782,
      "step": 280
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.3628506660461426,
      "learning_rate": 1.6448422127361707e-05,
      "loss": 0.9846,
      "step": 281
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.2074543237686157,
      "learning_rate": 1.6421997283753928e-05,
      "loss": 1.1698,
      "step": 282
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.6275904178619385,
      "learning_rate": 1.6395495900465306e-05,
      "loss": 0.999,
      "step": 283
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.2207213640213013,
      "learning_rate": 1.6368918293348893e-05,
      "loss": 1.1138,
      "step": 284
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.260969638824463,
      "learning_rate": 1.63422647791662e-05,
      "loss": 1.1467,
      "step": 285
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.2264899015426636,
      "learning_rate": 1.6315535675583425e-05,
      "loss": 1.1025,
      "step": 286
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.309613585472107,
      "learning_rate": 1.6288731301167667e-05,
      "loss": 1.0694,
      "step": 287
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2716935873031616,
      "learning_rate": 1.626185197538314e-05,
      "loss": 1.1388,
      "step": 288
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2847307920455933,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 1.1698,
      "step": 289
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.230944037437439,
      "learning_rate": 1.6207869752027248e-05,
      "loss": 1.0857,
      "step": 290
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.4834110736846924,
      "learning_rate": 1.6180767497835503e-05,
      "loss": 1.0989,
      "step": 291
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.4195455312728882,
      "learning_rate": 1.6153591579026545e-05,
      "loss": 1.0674,
      "step": 292
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2081892490386963,
      "learning_rate": 1.6126342319492784e-05,
      "loss": 1.1507,
      "step": 293
    },
    {
      "epoch": 0.31,
      "grad_norm": 2.0526626110076904,
      "learning_rate": 1.609902004400073e-05,
      "loss": 1.013,
      "step": 294
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.2041497230529785,
      "learning_rate": 1.6071625078187113e-05,
      "loss": 1.1409,
      "step": 295
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.406616449356079,
      "learning_rate": 1.6044157748555024e-05,
      "loss": 1.1526,
      "step": 296
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.5346976518630981,
      "learning_rate": 1.6016618382470014e-05,
      "loss": 1.0737,
      "step": 297
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.1958671808242798,
      "learning_rate": 1.598900730815617e-05,
      "loss": 1.0733,
      "step": 298
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.3337523937225342,
      "learning_rate": 1.5961324854692254e-05,
      "loss": 1.1471,
      "step": 299
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.4366693496704102,
      "learning_rate": 1.593357135200773e-05,
      "loss": 1.0917,
      "step": 300
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.7546011209487915,
      "learning_rate": 1.5905747130878853e-05,
      "loss": 1.0061,
      "step": 301
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.496777057647705,
      "learning_rate": 1.5877852522924733e-05,
      "loss": 1.0767,
      "step": 302
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.4293081760406494,
      "learning_rate": 1.5849887860603374e-05,
      "loss": 1.1606,
      "step": 303
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.372736930847168,
      "learning_rate": 1.582185347720771e-05,
      "loss": 1.0714,
      "step": 304
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.6297597885131836,
      "learning_rate": 1.5793749706861637e-05,
      "loss": 1.0541,
      "step": 305
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.2471482753753662,
      "learning_rate": 1.576557688451603e-05,
      "loss": 1.1397,
      "step": 306
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1925361156463623,
      "learning_rate": 1.5737335345944758e-05,
      "loss": 1.1662,
      "step": 307
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.362283706665039,
      "learning_rate": 1.570902542774066e-05,
      "loss": 1.0786,
      "step": 308
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.6759191751480103,
      "learning_rate": 1.568064746731156e-05,
      "loss": 1.0494,
      "step": 309
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1895592212677002,
      "learning_rate": 1.5652201802876227e-05,
      "loss": 1.2156,
      "step": 310
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.6639962196350098,
      "learning_rate": 1.5623688773460358e-05,
      "loss": 1.075,
      "step": 311
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.2729833126068115,
      "learning_rate": 1.559510871889252e-05,
      "loss": 1.0769,
      "step": 312
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1911274194717407,
      "learning_rate": 1.556646197980012e-05,
      "loss": 1.0927,
      "step": 313
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.1913120746612549,
      "learning_rate": 1.553774889760533e-05,
      "loss": 1.138,
      "step": 314
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.2033076286315918,
      "learning_rate": 1.5508969814521026e-05,
      "loss": 1.1261,
      "step": 315
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.653771996498108,
      "learning_rate": 1.5480125073546705e-05,
      "loss": 0.972,
      "step": 316
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.1409915685653687,
      "learning_rate": 1.5451215018464386e-05,
      "loss": 1.1324,
      "step": 317
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.1707923412322998,
      "learning_rate": 1.542223999383455e-05,
      "loss": 1.087,
      "step": 318
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.6266124248504639,
      "learning_rate": 1.5393200344991993e-05,
      "loss": 1.079,
      "step": 319
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.1634387969970703,
      "learning_rate": 1.5364096418041723e-05,
      "loss": 1.0632,
      "step": 320
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.1764509677886963,
      "learning_rate": 1.533492855985485e-05,
      "loss": 1.0774,
      "step": 321
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.2050230503082275,
      "learning_rate": 1.530569711806443e-05,
      "loss": 1.1361,
      "step": 322
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.335336685180664,
      "learning_rate": 1.527640244106133e-05,
      "loss": 1.1467,
      "step": 323
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.3851240873336792,
      "learning_rate": 1.524704487799008e-05,
      "loss": 1.0607,
      "step": 324
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.173155665397644,
      "learning_rate": 1.5217624778744718e-05,
      "loss": 1.1566,
      "step": 325
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.4441406726837158,
      "learning_rate": 1.5188142493964595e-05,
      "loss": 1.0578,
      "step": 326
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.3076049089431763,
      "learning_rate": 1.5158598375030218e-05,
      "loss": 1.0744,
      "step": 327
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.2257764339447021,
      "learning_rate": 1.5128992774059063e-05,
      "loss": 1.0754,
      "step": 328
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.3581258058547974,
      "learning_rate": 1.5099326043901361e-05,
      "loss": 1.1324,
      "step": 329
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.0659667253494263,
      "learning_rate": 1.5069598538135905e-05,
      "loss": 1.1751,
      "step": 330
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.4743688106536865,
      "learning_rate": 1.503981061106584e-05,
      "loss": 1.0226,
      "step": 331
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.1864526271820068,
      "learning_rate": 1.5009962617714425e-05,
      "loss": 1.1243,
      "step": 332
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.156830072402954,
      "learning_rate": 1.4980054913820814e-05,
      "loss": 1.0716,
      "step": 333
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.564710021018982,
      "learning_rate": 1.4950087855835816e-05,
      "loss": 0.9627,
      "step": 334
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.1137735843658447,
      "learning_rate": 1.4920061800917637e-05,
      "loss": 1.1136,
      "step": 335
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.1673604249954224,
      "learning_rate": 1.4889977106927642e-05,
      "loss": 1.1314,
      "step": 336
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.5666335821151733,
      "learning_rate": 1.485983413242606e-05,
      "loss": 1.0272,
      "step": 337
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.1360833644866943,
      "learning_rate": 1.4829633236667746e-05,
      "loss": 1.1574,
      "step": 338
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.2486088275909424,
      "learning_rate": 1.4799374779597866e-05,
      "loss": 1.135,
      "step": 339
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.2528752088546753,
      "learning_rate": 1.476905912184763e-05,
      "loss": 1.0797,
      "step": 340
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.5337449312210083,
      "learning_rate": 1.4738686624729987e-05,
      "loss": 0.983,
      "step": 341
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.3193140029907227,
      "learning_rate": 1.470825765023532e-05,
      "loss": 1.1208,
      "step": 342
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.226824402809143,
      "learning_rate": 1.4677772561027121e-05,
      "loss": 1.1282,
      "step": 343
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.2305374145507812,
      "learning_rate": 1.4647231720437687e-05,
      "loss": 1.0775,
      "step": 344
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.2837001085281372,
      "learning_rate": 1.4616635492463775e-05,
      "loss": 1.0505,
      "step": 345
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.2380614280700684,
      "learning_rate": 1.4585984241762268e-05,
      "loss": 1.1444,
      "step": 346
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.2850863933563232,
      "learning_rate": 1.4555278333645833e-05,
      "loss": 1.0587,
      "step": 347
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.2635538578033447,
      "learning_rate": 1.4524518134078565e-05,
      "loss": 1.133,
      "step": 348
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.3266578912734985,
      "learning_rate": 1.4493704009671614e-05,
      "loss": 1.0022,
      "step": 349
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.217466950416565,
      "learning_rate": 1.446283632767884e-05,
      "loss": 1.0461,
      "step": 350
    },
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.2176854610443115, | |
| "learning_rate": 1.4431915455992416e-05, | |
| "loss": 1.2106, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "grad_norm": 1.6798524856567383, | |
| "learning_rate": 1.440094176313844e-05, | |
| "loss": 0.9052, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.3609615564346313, | |
| "learning_rate": 1.4369915618272568e-05, | |
| "loss": 1.0609, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.1341378688812256, | |
| "learning_rate": 1.4338837391175582e-05, | |
| "loss": 1.1482, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.2776904106140137, | |
| "learning_rate": 1.4307707452249013e-05, | |
| "loss": 1.1396, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.60136878490448, | |
| "learning_rate": 1.42765261725107e-05, | |
| "loss": 0.9093, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.2123847007751465, | |
| "learning_rate": 1.424529392359039e-05, | |
| "loss": 1.1308, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.2390875816345215, | |
| "learning_rate": 1.4214011077725293e-05, | |
| "loss": 1.1384, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.2779793739318848, | |
| "learning_rate": 1.4182678007755653e-05, | |
| "loss": 1.1294, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.5436376333236694, | |
| "learning_rate": 1.4151295087120307e-05, | |
| "loss": 0.9877, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "grad_norm": 1.0109174251556396, | |
| "learning_rate": 1.4119862689852224e-05, | |
| "loss": 1.1648, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.2247241735458374, | |
| "learning_rate": 1.4088381190574051e-05, | |
| "loss": 1.1243, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.4918550252914429, | |
| "learning_rate": 1.4056850964493668e-05, | |
| "loss": 1.0968, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.6037483215332031, | |
| "learning_rate": 1.4025272387399676e-05, | |
| "loss": 0.9681, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.2485237121582031, | |
| "learning_rate": 1.3993645835656955e-05, | |
| "loss": 1.0584, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.2098240852355957, | |
| "learning_rate": 1.3961971686202163e-05, | |
| "loss": 1.2082, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 3.810776710510254, | |
| "learning_rate": 1.3930250316539237e-05, | |
| "loss": 0.9797, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.0940262079238892, | |
| "learning_rate": 1.3898482104734909e-05, | |
| "loss": 1.069, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 1.298141360282898, | |
| "learning_rate": 1.3866667429414188e-05, | |
| "loss": 1.1258, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "grad_norm": 2.3309221267700195, | |
| "learning_rate": 1.383480666975586e-05, | |
| "loss": 1.044, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.5910707712173462, | |
| "learning_rate": 1.3802900205487948e-05, | |
| "loss": 1.0111, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.2307648658752441, | |
| "learning_rate": 1.3770948416883205e-05, | |
| "loss": 1.132, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.667038083076477, | |
| "learning_rate": 1.3738951684754585e-05, | |
| "loss": 1.0521, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.4419728517532349, | |
| "learning_rate": 1.3706910390450679e-05, | |
| "loss": 1.0487, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.7289230823516846, | |
| "learning_rate": 1.3674824915851193e-05, | |
| "loss": 1.0874, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.282025694847107, | |
| "learning_rate": 1.3642695643362398e-05, | |
| "loss": 1.1262, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.0562697649002075, | |
| "learning_rate": 1.3610522955912551e-05, | |
| "loss": 1.1482, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.4834318161010742, | |
| "learning_rate": 1.3578307236947348e-05, | |
| "loss": 0.9861, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.1225820779800415, | |
| "learning_rate": 1.3546048870425356e-05, | |
| "loss": 1.1319, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "grad_norm": 1.2173471450805664, | |
| "learning_rate": 1.3513748240813429e-05, | |
| "loss": 1.0627, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.160025954246521, | |
| "learning_rate": 1.3481405733082118e-05, | |
| "loss": 1.1187, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.6597986221313477, | |
| "learning_rate": 1.3449021732701106e-05, | |
| "loss": 0.9575, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.0810602903366089, | |
| "learning_rate": 1.3416596625634595e-05, | |
| "loss": 1.1344, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.354711651802063, | |
| "learning_rate": 1.3384130798336705e-05, | |
| "loss": 0.9683, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.026106357574463, | |
| "learning_rate": 1.3351624637746885e-05, | |
| "loss": 1.1558, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.1363128423690796, | |
| "learning_rate": 1.3319078531285286e-05, | |
| "loss": 1.1278, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.229211449623108, | |
| "learning_rate": 1.3286492866848143e-05, | |
| "loss": 1.063, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.2198550701141357, | |
| "learning_rate": 1.3253868032803171e-05, | |
| "loss": 1.0468, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "grad_norm": 1.2687575817108154, | |
| "learning_rate": 1.3221204417984907e-05, | |
| "loss": 0.9878, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.1524310111999512, | |
| "learning_rate": 1.3188502411690101e-05, | |
| "loss": 1.2017, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.2711753845214844, | |
| "learning_rate": 1.3155762403673065e-05, | |
| "loss": 0.993, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.087242603302002, | |
| "learning_rate": 1.3122984784141021e-05, | |
| "loss": 1.1362, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.236801266670227, | |
| "learning_rate": 1.3090169943749475e-05, | |
| "loss": 1.0461, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.1489235162734985, | |
| "learning_rate": 1.3057318273597531e-05, | |
| "loss": 1.0736, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.0725665092468262, | |
| "learning_rate": 1.3024430165223245e-05, | |
| "loss": 1.1093, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.1144657135009766, | |
| "learning_rate": 1.2991506010598965e-05, | |
| "loss": 1.0659, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.4604588747024536, | |
| "learning_rate": 1.2958546202126638e-05, | |
| "loss": 0.959, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.081122636795044, | |
| "learning_rate": 1.2925551132633164e-05, | |
| "loss": 1.1373, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "grad_norm": 1.157921552658081, | |
| "learning_rate": 1.2892521195365679e-05, | |
| "loss": 1.1344, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.3245052099227905, | |
| "learning_rate": 1.2859456783986892e-05, | |
| "loss": 0.9987, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.1181085109710693, | |
| "learning_rate": 1.2826358292570398e-05, | |
| "loss": 1.1174, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.141554594039917, | |
| "learning_rate": 1.2793226115595951e-05, | |
| "loss": 1.1099, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.266139030456543, | |
| "learning_rate": 1.2760060647944794e-05, | |
| "loss": 0.9965, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.2865076065063477, | |
| "learning_rate": 1.2726862284894939e-05, | |
| "loss": 0.9805, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.1285760402679443, | |
| "learning_rate": 1.2693631422116455e-05, | |
| "loss": 1.19, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.1956912279129028, | |
| "learning_rate": 1.2660368455666752e-05, | |
| "loss": 1.0404, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.2064231634140015, | |
| "learning_rate": 1.262707378198587e-05, | |
| "loss": 0.9706, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "grad_norm": 1.0089880228042603, | |
| "learning_rate": 1.2593747797891743e-05, | |
| "loss": 1.1606, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.1222283840179443, | |
| "learning_rate": 1.2560390900575472e-05, | |
| "loss": 1.0971, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.0860025882720947, | |
| "learning_rate": 1.2527003487596598e-05, | |
| "loss": 1.0773, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.277338981628418, | |
| "learning_rate": 1.2493585956878354e-05, | |
| "loss": 0.9587, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.1371269226074219, | |
| "learning_rate": 1.2460138706702929e-05, | |
| "loss": 1.0219, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.1647454500198364, | |
| "learning_rate": 1.242666213570672e-05, | |
| "loss": 1.1968, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.240614891052246, | |
| "learning_rate": 1.2393156642875579e-05, | |
| "loss": 1.0158, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.2026546001434326, | |
| "learning_rate": 1.2359622627540059e-05, | |
| "loss": 0.9524, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.139374852180481, | |
| "learning_rate": 1.2326060489370655e-05, | |
| "loss": 1.1023, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "grad_norm": 1.2432211637496948, | |
| "learning_rate": 1.229247062837304e-05, | |
| "loss": 1.0266, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.217292070388794, | |
| "learning_rate": 1.2258853444883297e-05, | |
| "loss": 1.0003, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.1538138389587402, | |
| "learning_rate": 1.2225209339563144e-05, | |
| "loss": 1.1153, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.127111792564392, | |
| "learning_rate": 1.219153871339518e-05, | |
| "loss": 1.1139, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.313961386680603, | |
| "learning_rate": 1.2157841967678064e-05, | |
| "loss": 0.9453, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.1468470096588135, | |
| "learning_rate": 1.2124119504021776e-05, | |
| "loss": 0.9939, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.079983115196228, | |
| "learning_rate": 1.2090371724342804e-05, | |
| "loss": 1.1005, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.119608759880066, | |
| "learning_rate": 1.2056599030859367e-05, | |
| "loss": 1.1299, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.379815936088562, | |
| "learning_rate": 1.2022801826086609e-05, | |
| "loss": 0.9659, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.091261386871338, | |
| "learning_rate": 1.1988980512831809e-05, | |
| "loss": 1.1151, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "grad_norm": 1.1306649446487427, | |
| "learning_rate": 1.195513549418959e-05, | |
| "loss": 1.0568, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.12276029586792, | |
| "learning_rate": 1.1921267173537085e-05, | |
| "loss": 1.1069, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.2640670537948608, | |
| "learning_rate": 1.1887375954529167e-05, | |
| "loss": 1.0129, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.1537717580795288, | |
| "learning_rate": 1.1853462241093614e-05, | |
| "loss": 1.032, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.1456965208053589, | |
| "learning_rate": 1.1819526437426298e-05, | |
| "loss": 1.0182, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.0719536542892456, | |
| "learning_rate": 1.1785568947986368e-05, | |
| "loss": 1.1317, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.141640543937683, | |
| "learning_rate": 1.1751590177491441e-05, | |
| "loss": 1.0865, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.1007376909255981, | |
| "learning_rate": 1.1717590530912764e-05, | |
| "loss": 1.0372, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.091681957244873, | |
| "learning_rate": 1.1683570413470384e-05, | |
| "loss": 1.0907, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "grad_norm": 1.3762112855911255, | |
| "learning_rate": 1.164953023062835e-05, | |
| "loss": 0.908, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.228991150856018, | |
| "learning_rate": 1.1615470388089836e-05, | |
| "loss": 1.0533, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.3046116828918457, | |
| "learning_rate": 1.1581391291792336e-05, | |
| "loss": 1.0362, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.0185683965682983, | |
| "learning_rate": 1.1547293347902813e-05, | |
| "loss": 1.1022, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.2197895050048828, | |
| "learning_rate": 1.151317696281287e-05, | |
| "loss": 1.018, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.1089208126068115, | |
| "learning_rate": 1.1479042543133895e-05, | |
| "loss": 1.0618, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.0808110237121582, | |
| "learning_rate": 1.1444890495692214e-05, | |
| "loss": 1.098, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.0540724992752075, | |
| "learning_rate": 1.1410721227524256e-05, | |
| "loss": 1.0357, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.2666085958480835, | |
| "learning_rate": 1.1376535145871685e-05, | |
| "loss": 1.0264, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.2804818153381348, | |
| "learning_rate": 1.1342332658176556e-05, | |
| "loss": 1.0213, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "grad_norm": 1.1511904001235962, | |
| "learning_rate": 1.1308114172076464e-05, | |
| "loss": 1.1473, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.5478615760803223, | |
| "learning_rate": 1.1273880095399667e-05, | |
| "loss": 0.9351, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.0723164081573486, | |
| "learning_rate": 1.1239630836160246e-05, | |
| "loss": 1.1156, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.1251463890075684, | |
| "learning_rate": 1.1205366802553231e-05, | |
| "loss": 1.0915, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.4079906940460205, | |
| "learning_rate": 1.1171088402949739e-05, | |
| "loss": 0.9467, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.1016011238098145, | |
| "learning_rate": 1.1136796045892102e-05, | |
| "loss": 0.9843, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.1380492448806763, | |
| "learning_rate": 1.1102490140089009e-05, | |
| "loss": 1.1932, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.704362392425537, | |
| "learning_rate": 1.1068171094410618e-05, | |
| "loss": 1.0112, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.2886050939559937, | |
| "learning_rate": 1.10338393178837e-05, | |
| "loss": 0.9298, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "grad_norm": 1.2823352813720703, | |
| "learning_rate": 1.0999495219686762e-05, | |
| "loss": 1.1235, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.3854316473007202, | |
| "learning_rate": 1.0965139209145153e-05, | |
| "loss": 1.104, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.2376712560653687, | |
| "learning_rate": 1.0930771695726201e-05, | |
| "loss": 1.0406, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.4863057136535645, | |
| "learning_rate": 1.0896393089034336e-05, | |
| "loss": 0.9608, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.1591633558273315, | |
| "learning_rate": 1.0862003798806195e-05, | |
| "loss": 1.1205, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.146751880645752, | |
| "learning_rate": 1.0827604234905749e-05, | |
| "loss": 1.1088, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.1694458723068237, | |
| "learning_rate": 1.079319480731941e-05, | |
| "loss": 1.0264, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.4388976097106934, | |
| "learning_rate": 1.0758775926151155e-05, | |
| "loss": 0.8752, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.0628877878189087, | |
| "learning_rate": 1.0724348001617626e-05, | |
| "loss": 1.1085, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "grad_norm": 1.03635835647583, | |
| "learning_rate": 1.0689911444043249e-05, | |
| "loss": 1.1095, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.2480405569076538, | |
| "learning_rate": 1.0655466663855349e-05, | |
| "loss": 0.9926, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.172764778137207, | |
| "learning_rate": 1.0621014071579241e-05, | |
| "loss": 1.1077, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.1374162435531616, | |
| "learning_rate": 1.0586554077833346e-05, | |
| "loss": 1.1056, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.283673882484436, | |
| "learning_rate": 1.0552087093324314e-05, | |
| "loss": 1.0544, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.2234885692596436, | |
| "learning_rate": 1.0517613528842096e-05, | |
| "loss": 0.9911, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.1086273193359375, | |
| "learning_rate": 1.0483133795255072e-05, | |
| "loss": 1.1151, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.216552734375, | |
| "learning_rate": 1.044864830350515e-05, | |
| "loss": 1.1075, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.0701121091842651, | |
| "learning_rate": 1.0414157464602866e-05, | |
| "loss": 1.0528, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.2622308731079102, | |
| "learning_rate": 1.0379661689622477e-05, | |
| "loss": 1.0425, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.0980409383773804, | |
| "learning_rate": 1.0345161389697083e-05, | |
| "loss": 1.0278, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.1280395984649658, | |
| "learning_rate": 1.0310656976013704e-05, | |
| "loss": 1.1132, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.1697815656661987, | |
| "learning_rate": 1.027614885980839e-05, | |
| "loss": 1.0309, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.4110400676727295, | |
| "learning_rate": 1.0241637452361323e-05, | |
| "loss": 0.9193, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.1531561613082886, | |
| "learning_rate": 1.0207123164991912e-05, | |
| "loss": 1.0317, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.0967586040496826, | |
| "learning_rate": 1.0172606409053887e-05, | |
| "loss": 1.1125, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.1400479078292847, | |
| "learning_rate": 1.0138087595930394e-05, | |
| "loss": 1.0896, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.1727864742279053, | |
| "learning_rate": 1.0103567137029111e-05, | |
| "loss": 1.0291, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.099351406097412, | |
| "learning_rate": 1.0069045443777318e-05, | |
| "loss": 1.1167, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "grad_norm": 1.2042431831359863, | |
| "learning_rate": 1.0034522927617014e-05, | |
| "loss": 1.0387, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.2122771739959717, | |
| "learning_rate": 1e-05, | |
| "loss": 1.0024, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.047471284866333, | |
| "learning_rate": 9.965477072382989e-06, | |
| "loss": 1.1256, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.1856547594070435, | |
| "learning_rate": 9.930954556222683e-06, | |
| "loss": 1.0431, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.2654024362564087, | |
| "learning_rate": 9.896432862970892e-06, | |
| "loss": 1.0183, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.0468626022338867, | |
| "learning_rate": 9.861912404069608e-06, | |
| "loss": 1.0285, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.2478185892105103, | |
| "learning_rate": 9.827393590946116e-06, | |
| "loss": 1.1066, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.1957437992095947, | |
| "learning_rate": 9.79287683500809e-06, | |
| "loss": 1.0255, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.206589698791504, | |
| "learning_rate": 9.75836254763868e-06, | |
| "loss": 1.0346, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "grad_norm": 1.2985960245132446, | |
| "learning_rate": 9.723851140191613e-06, | |
| "loss": 0.9962, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.097292423248291, | |
| "learning_rate": 9.689343023986303e-06, | |
| "loss": 1.1252, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.310494303703308, | |
| "learning_rate": 9.654838610302922e-06, | |
| "loss": 0.9739, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.1744049787521362, | |
| "learning_rate": 9.620338310377526e-06, | |
| "loss": 1.0807, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.1583396196365356, | |
| "learning_rate": 9.58584253539714e-06, | |
| "loss": 1.1009, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.0837821960449219, | |
| "learning_rate": 9.551351696494854e-06, | |
| "loss": 1.0394, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.2815313339233398, | |
| "learning_rate": 9.516866204744932e-06, | |
| "loss": 1.0037, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.0583621263504028, | |
| "learning_rate": 9.482386471157905e-06, | |
| "loss": 1.0987, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.1983296871185303, | |
| "learning_rate": 9.447912906675687e-06, | |
| "loss": 1.0385, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.4315332174301147, | |
| "learning_rate": 9.413445922166654e-06, | |
| "loss": 0.9143, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "grad_norm": 1.1101816892623901, | |
| "learning_rate": 9.378985928420764e-06, | |
| "loss": 1.1194, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.1557713747024536, | |
| "learning_rate": 9.344533336144653e-06, | |
| "loss": 1.0882, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.1829181909561157, | |
| "learning_rate": 9.310088555956751e-06, | |
| "loss": 1.0285, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.2111883163452148, | |
| "learning_rate": 9.275651998382377e-06, | |
| "loss": 0.9647, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.0640887022018433, | |
| "learning_rate": 9.241224073848848e-06, | |
| "loss": 1.0927, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.1613415479660034, | |
| "learning_rate": 9.206805192680592e-06, | |
| "loss": 1.0959, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.145620346069336, | |
| "learning_rate": 9.172395765094255e-06, | |
| "loss": 1.0888, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.1621886491775513, | |
| "learning_rate": 9.137996201193807e-06, | |
| "loss": 0.9776, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 1.3330769538879395, | |
| "learning_rate": 9.103606910965666e-06, | |
| "loss": 0.898, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "grad_norm": 0.9906296133995056, | |
| "learning_rate": 9.069228304273802e-06, | |
| "loss": 1.1105, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.045145034790039, | |
| "learning_rate": 9.034860790854848e-06, | |
| "loss": 1.0883, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.2458213567733765, | |
| "learning_rate": 9.00050478031324e-06, | |
| "loss": 0.9549, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.0897191762924194, | |
| "learning_rate": 8.966160682116301e-06, | |
| "loss": 1.1032, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.118552803993225, | |
| "learning_rate": 8.931828905589385e-06, | |
| "loss": 1.0901, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.31868314743042, | |
| "learning_rate": 8.897509859910996e-06, | |
| "loss": 0.8825, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 0.9434429407119751, | |
| "learning_rate": 8.863203954107902e-06, | |
| "loss": 1.1377, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.1337504386901855, | |
| "learning_rate": 8.828911597050263e-06, | |
| "loss": 1.0849, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.1845532655715942, | |
| "learning_rate": 8.79463319744677e-06, | |
| "loss": 1.0565, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.1930965185165405, | |
| "learning_rate": 8.760369163839759e-06, | |
| "loss": 1.0123, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "grad_norm": 1.0940407514572144, | |
| "learning_rate": 8.726119904600337e-06, | |
| "loss": 1.0417, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.0366013050079346, | |
| "learning_rate": 8.691885827923541e-06, | |
| "loss": 1.1119, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.115625262260437, | |
| "learning_rate": 8.657667341823449e-06, | |
| "loss": 1.1051, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.167714238166809, | |
| "learning_rate": 8.62346485412832e-06, | |
| "loss": 0.9347, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.1591241359710693, | |
| "learning_rate": 8.58927877247575e-06, | |
| "loss": 0.9986, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.0388243198394775, | |
| "learning_rate": 8.55510950430779e-06, | |
| "loss": 1.1991, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.4323804378509521, | |
| "learning_rate": 8.520957456866107e-06, | |
| "loss": 0.961, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.3130244016647339, | |
| "learning_rate": 8.48682303718713e-06, | |
| "loss": 0.9239, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.1116801500320435, | |
| "learning_rate": 8.452706652097187e-06, | |
| "loss": 1.0831, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "grad_norm": 1.2475357055664062, | |
| "learning_rate": 8.418608708207667e-06, | |
| "loss": 1.0868, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.3295010328292847, | |
| "learning_rate": 8.384529611910164e-06, | |
| "loss": 0.9415, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.1048485040664673, | |
| "learning_rate": 8.35046976937165e-06, | |
| "loss": 1.1225, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.3689091205596924, | |
| "learning_rate": 8.316429586529616e-06, | |
| "loss": 1.0474, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.408400297164917, | |
| "learning_rate": 8.28240946908724e-06, | |
| "loss": 1.0187, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.063767433166504, | |
| "learning_rate": 8.24840982250856e-06, | |
| "loss": 1.0282, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.2288846969604492, | |
| "learning_rate": 8.214431052013636e-06, | |
| "loss": 1.073, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.2495815753936768, | |
| "learning_rate": 8.180473562573705e-06, | |
| "loss": 1.0157, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.2062842845916748, | |
| "learning_rate": 8.146537758906388e-06, | |
| "loss": 1.0082, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "grad_norm": 1.091819167137146, | |
| "learning_rate": 8.112624045470834e-06, | |
| "loss": 1.095, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.3363701105117798, | |
| "learning_rate": 8.078732826462917e-06, | |
| "loss": 1.0265, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.3668080568313599, | |
| "learning_rate": 8.044864505810415e-06, | |
| "loss": 1.054, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.0828039646148682, | |
| "learning_rate": 8.011019487168193e-06, | |
| "loss": 1.0158, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.161185383796692, | |
| "learning_rate": 7.977198173913394e-06, | |
| "loss": 1.086, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.2727521657943726, | |
| "learning_rate": 7.943400969140635e-06, | |
| "loss": 1.0391, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.2065167427062988, | |
| "learning_rate": 7.909628275657199e-06, | |
| "loss": 1.0733, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.366819143295288, | |
| "learning_rate": 7.875880495978227e-06, | |
| "loss": 1.0121, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.0957239866256714, | |
| "learning_rate": 7.84215803232194e-06, | |
| "loss": 1.0409, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.1914026737213135, | |
| "learning_rate": 7.808461286604828e-06, | |
| "loss": 1.0874, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "grad_norm": 1.5846854448318481, | |
| "learning_rate": 7.774790660436857e-06, | |
| "loss": 0.9347, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.025207757949829, | |
| "learning_rate": 7.741146555116708e-06, | |
| "loss": 1.0469, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.1924024820327759, | |
| "learning_rate": 7.707529371626966e-06, | |
| "loss": 1.114, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.5877397060394287, | |
| "learning_rate": 7.67393951062935e-06, | |
| "loss": 1.061, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.325735330581665, | |
| "learning_rate": 7.640377372459944e-06, | |
| "loss": 1.001, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.0528124570846558, | |
| "learning_rate": 7.606843357124426e-06, | |
| "loss": 1.0927, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.1408836841583252, | |
| "learning_rate": 7.573337864293283e-06, | |
| "loss": 1.0949, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.1893867254257202, | |
| "learning_rate": 7.539861293297073e-06, | |
| "loss": 1.0165, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.6183538436889648, | |
| "learning_rate": 7.506414043121647e-06, | |
| "loss": 0.9169, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "grad_norm": 1.0619072914123535, | |
| "learning_rate": 7.472996512403403e-06, | |
| "loss": 1.113, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.2360881567001343, | |
| "learning_rate": 7.4396090994245295e-06, | |
| "loss": 1.0816, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.3750165700912476, | |
| "learning_rate": 7.406252202108258e-06, | |
| "loss": 1.0001, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.3785020112991333, | |
| "learning_rate": 7.372926218014131e-06, | |
| "loss": 1.0184, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.0877310037612915, | |
| "learning_rate": 7.33963154433325e-06, | |
| "loss": 1.0917, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.2650117874145508, | |
| "learning_rate": 7.306368577883547e-06, | |
| "loss": 1.0217, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.8510770797729492, | |
| "learning_rate": 7.273137715105063e-06, | |
| "loss": 0.9644, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.0900006294250488, | |
| "learning_rate": 7.239939352055208e-06, | |
| "loss": 1.1778, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.6603879928588867, | |
| "learning_rate": 7.2067738844040516e-06, | |
| "loss": 0.9478, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.4141948223114014, | |
| "learning_rate": 7.173641707429606e-06, | |
| "loss": 1.0213, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "grad_norm": 1.5857387781143188, | |
| "learning_rate": 7.140543216013109e-06, | |
| "loss": 1.0409, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.119321584701538, | |
| "learning_rate": 7.107478804634324e-06, | |
| "loss": 1.0929, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.182167410850525, | |
| "learning_rate": 7.07444886736684e-06, | |
| "loss": 1.1024, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.4024657011032104, | |
| "learning_rate": 7.041453797873363e-06, | |
| "loss": 0.9307, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.08544921875, | |
| "learning_rate": 7.008493989401039e-06, | |
| "loss": 0.9931, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.977465033531189, | |
| "learning_rate": 6.975569834776757e-06, | |
| "loss": 1.1982, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.7482020854949951, | |
| "learning_rate": 6.942681726402474e-06, | |
| "loss": 0.945, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.4048359394073486, | |
| "learning_rate": 6.909830056250527e-06, | |
| "loss": 0.9493, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 0.974734902381897, | |
| "learning_rate": 6.8770152158589806e-06, | |
| "loss": 1.0988, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "grad_norm": 1.1412526369094849, | |
| "learning_rate": 6.844237596326941e-06, | |
| "loss": 1.0842, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.4298932552337646, | |
| "learning_rate": 6.811497588309901e-06, | |
| "loss": 1.0342, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.3431940078735352, | |
| "learning_rate": 6.778795582015096e-06, | |
| "loss": 0.9531, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.0488766431808472, | |
| "learning_rate": 6.746131967196834e-06, | |
| "loss": 1.1047, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.778768539428711, | |
| "learning_rate": 6.7135071331518575e-06, | |
| "loss": 0.931, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.1012409925460815, | |
| "learning_rate": 6.680921468714718e-06, | |
| "loss": 1.0128, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.0400038957595825, | |
| "learning_rate": 6.648375362253119e-06, | |
| "loss": 1.0885, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.0656168460845947, | |
| "learning_rate": 6.615869201663296e-06, | |
| "loss": 1.0293, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.2684777975082397, | |
| "learning_rate": 6.583403374365406e-06, | |
| "loss": 0.9943, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "grad_norm": 1.0550339221954346, | |
| "learning_rate": 6.550978267298893e-06, | |
| "loss": 1.0767, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.0904154777526855, | |
| "learning_rate": 6.518594266917883e-06, | |
| "loss": 1.0133, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.0179896354675293, | |
| "learning_rate": 6.486251759186573e-06, | |
| "loss": 1.0753, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.2216280698776245, | |
| "learning_rate": 6.453951129574644e-06, | |
| "loss": 1.0446, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.277762770652771, | |
| "learning_rate": 6.421692763052654e-06, | |
| "loss": 1.0345, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.0452277660369873, | |
| "learning_rate": 6.3894770440874525e-06, | |
| "loss": 0.9939, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.098008632659912, | |
| "learning_rate": 6.357304356637606e-06, | |
| "loss": 1.0877, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.248223900794983, | |
| "learning_rate": 6.325175084148809e-06, | |
| "loss": 1.0169, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.1869797706604004, | |
| "learning_rate": 6.293089609549325e-06, | |
| "loss": 0.9592, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 0.9501675367355347, | |
| "learning_rate": 6.261048315245419e-06, | |
| "loss": 1.1065, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "grad_norm": 1.1710013151168823, | |
| "learning_rate": 6.229051583116796e-06, | |
| "loss": 0.892, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.077901005744934, | |
| "learning_rate": 6.197099794512056e-06, | |
| "loss": 1.0481, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.031724452972412, | |
| "learning_rate": 6.165193330244144e-06, | |
| "loss": 1.0808, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.0057587623596191, | |
| "learning_rate": 6.133332570585813e-06, | |
| "loss": 1.0323, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.2024904489517212, | |
| "learning_rate": 6.101517895265094e-06, | |
| "loss": 1.0076, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.078473448753357, | |
| "learning_rate": 6.069749683460765e-06, | |
| "loss": 1.0988, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.036250352859497, | |
| "learning_rate": 6.03802831379784e-06, | |
| "loss": 1.088, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 0.987162709236145, | |
| "learning_rate": 6.006354164343047e-06, | |
| "loss": 1.0138, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.094117522239685, | |
| "learning_rate": 5.9747276126003265e-06, | |
| "loss": 1.0177, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "grad_norm": 1.1063374280929565, | |
| "learning_rate": 5.943149035506337e-06, | |
| "loss": 0.9906, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.184809684753418, | |
| "learning_rate": 5.911618809425952e-06, | |
| "loss": 1.1149, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.360694408416748, | |
| "learning_rate": 5.880137310147782e-06, | |
| "loss": 0.919, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.0518381595611572, | |
| "learning_rate": 5.848704912879699e-06, | |
| "loss": 1.0812, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.1917508840560913, | |
| "learning_rate": 5.8173219922443516e-06, | |
| "loss": 1.0718, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.1311396360397339, | |
| "learning_rate": 5.785988922274711e-06, | |
| "loss": 1.0231, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.1775773763656616, | |
| "learning_rate": 5.754706076409613e-06, | |
| "loss": 1.0348, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.0530229806900024, | |
| "learning_rate": 5.723473827489301e-06, | |
| "loss": 1.0786, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.0041298866271973, | |
| "learning_rate": 5.692292547750989e-06, | |
| "loss": 1.0184, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.1858335733413696, | |
| "learning_rate": 5.66116260882442e-06, | |
| "loss": 0.9255, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "grad_norm": 1.0058790445327759, | |
| "learning_rate": 5.630084381727434e-06, | |
| "loss": 1.0097, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.0766551494598389, | |
| "learning_rate": 5.599058236861559e-06, | |
| "loss": 1.0815, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.1882452964782715, | |
| "learning_rate": 5.5680845440075885e-06, | |
| "loss": 0.9794, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.1177659034729004, | |
| "learning_rate": 5.537163672321161e-06, | |
| "loss": 0.9706, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.0302551984786987, | |
| "learning_rate": 5.5062959903283855e-06, | |
| "loss": 1.0865, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 0.9774730801582336, | |
| "learning_rate": 5.475481865921441e-06, | |
| "loss": 1.0975, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.13545823097229, | |
| "learning_rate": 5.444721666354169e-06, | |
| "loss": 1.011, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.0284937620162964, | |
| "learning_rate": 5.414015758237734e-06, | |
| "loss": 0.9343, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.0100305080413818, | |
| "learning_rate": 5.3833645075362295e-06, | |
| "loss": 1.1738, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "grad_norm": 1.0857547521591187, | |
| "learning_rate": 5.352768279562315e-06, | |
| "loss": 1.0047, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.1492998600006104, | |
| "learning_rate": 5.32222743897288e-06, | |
| "loss": 1.0088, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.0108751058578491, | |
| "learning_rate": 5.2917423497646834e-06, | |
| "loss": 1.0409, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.0216511487960815, | |
| "learning_rate": 5.2613133752700145e-06, | |
| "loss": 1.0759, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.9948233962059021, | |
| "learning_rate": 5.230940878152371e-06, | |
| "loss": 1.0643, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.1158093214035034, | |
| "learning_rate": 5.200625220402139e-06, | |
| "loss": 0.8457, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.0142375230789185, | |
| "learning_rate": 5.1703667633322575e-06, | |
| "loss": 1.0795, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 0.9932689666748047, | |
| "learning_rate": 5.14016586757394e-06, | |
| "loss": 1.0064, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.1480035781860352, | |
| "learning_rate": 5.110022893072361e-06, | |
| "loss": 0.9945, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "grad_norm": 1.1374469995498657, | |
| "learning_rate": 5.079938199082363e-06, | |
| "loss": 1.0217, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.0328603982925415, | |
| "learning_rate": 5.049912144164186e-06, | |
| "loss": 1.016, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.0324801206588745, | |
| "learning_rate": 5.019945086179192e-06, | |
| "loss": 1.0573, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.083923578262329, | |
| "learning_rate": 4.9900373822855805e-06, | |
| "loss": 1.0689, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.0592447519302368, | |
| "learning_rate": 4.960189388934163e-06, | |
| "loss": 0.9942, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.0185003280639648, | |
| "learning_rate": 4.930401461864099e-06, | |
| "loss": 1.1126, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.0514934062957764, | |
| "learning_rate": 4.900673956098644e-06, | |
| "loss": 0.9955, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.1137511730194092, | |
| "learning_rate": 4.87100722594094e-06, | |
| "loss": 1.0337, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.132507562637329, | |
| "learning_rate": 4.841401624969782e-06, | |
| "loss": 1.0179, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 1.0256370306015015, | |
| "learning_rate": 4.811857506035407e-06, | |
| "loss": 1.081, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "grad_norm": 0.947429895401001, | |
| "learning_rate": 4.7823752212552855e-06, | |
| "loss": 1.1175, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.1376070976257324, | |
| "learning_rate": 4.75295512200992e-06, | |
| "loss": 0.9234, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.005340337753296, | |
| "learning_rate": 4.7235975589386715e-06, | |
| "loss": 1.0681, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.0254008769989014, | |
| "learning_rate": 4.694302881935574e-06, | |
| "loss": 0.9984, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.1366630792617798, | |
| "learning_rate": 4.66507144014515e-06, | |
| "loss": 0.9158, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.0358850955963135, | |
| "learning_rate": 4.635903581958276e-06, | |
| "loss": 1.0862, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.0538750886917114, | |
| "learning_rate": 4.606799655008009e-06, | |
| "loss": 1.0975, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.1968530416488647, | |
| "learning_rate": 4.5777600061654505e-06, | |
| "loss": 0.9169, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 0.9868187308311462, | |
| "learning_rate": 4.5487849815356145e-06, | |
| "loss": 1.1307, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "grad_norm": 1.0044755935668945, | |
| "learning_rate": 4.519874926453303e-06, | |
| "loss": 1.0765, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.0476038455963135, | |
| "learning_rate": 4.491030185478976e-06, | |
| "loss": 1.0052, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.1633585691452026, | |
| "learning_rate": 4.462251102394669e-06, | |
| "loss": 0.8796, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.0574334859848022, | |
| "learning_rate": 4.433538020199882e-06, | |
| "loss": 1.0953, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.9922649264335632, | |
| "learning_rate": 4.404891281107482e-06, | |
| "loss": 1.0842, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.0577690601348877, | |
| "learning_rate": 4.3763112265396445e-06, | |
| "loss": 1.027, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.0246644020080566, | |
| "learning_rate": 4.347798197123777e-06, | |
| "loss": 0.9842, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.0055768489837646, | |
| "learning_rate": 4.319352532688444e-06, | |
| "loss": 1.0784, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 0.9931952357292175, | |
| "learning_rate": 4.290974572259342e-06, | |
| "loss": 1.0256, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "grad_norm": 1.0235830545425415, | |
| "learning_rate": 4.262664654055247e-06, | |
| "loss": 1.0785, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.225419521331787, | |
| "learning_rate": 4.234423115483971e-06, | |
| "loss": 0.967, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.0456818342208862, | |
| "learning_rate": 4.206250293138366e-06, | |
| "loss": 0.9938, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.0311731100082397, | |
| "learning_rate": 4.178146522792296e-06, | |
| "loss": 1.1887, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.148885726928711, | |
| "learning_rate": 4.15011213939663e-06, | |
| "loss": 0.8242, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.0714539289474487, | |
| "learning_rate": 4.12214747707527e-06, | |
| "loss": 1.0034, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 0.925980806350708, | |
| "learning_rate": 4.094252869121153e-06, | |
| "loss": 1.1107, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.0093046426773071, | |
| "learning_rate": 4.066428647992275e-06, | |
| "loss": 1.074, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.1428567171096802, | |
| "learning_rate": 4.038675145307747e-06, | |
| "loss": 0.8293, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.0490410327911377, | |
| "learning_rate": 4.010992691843829e-06, | |
| "loss": 1.0945, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "grad_norm": 1.0624845027923584, | |
| "learning_rate": 3.98338161752999e-06, | |
| "loss": 1.0922, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.0150221586227417, | |
| "learning_rate": 3.955842251444978e-06, | |
| "loss": 1.0827, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.1132349967956543, | |
| "learning_rate": 3.9283749218128885e-06, | |
| "loss": 0.9355, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 0.9088261723518372, | |
| "learning_rate": 3.900979955999271e-06, | |
| "loss": 1.1191, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.0263450145721436, | |
| "learning_rate": 3.8736576805072165e-06, | |
| "loss": 1.1022, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.0015835762023926, | |
| "learning_rate": 3.846408420973456e-06, | |
| "loss": 1.0451, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.1172919273376465, | |
| "learning_rate": 3.819232502164499e-06, | |
| "loss": 0.9474, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.0435349941253662, | |
| "learning_rate": 3.792130247972756e-06, | |
| "loss": 0.9821, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.0229796171188354, | |
| "learning_rate": 3.7651019814126656e-06, | |
| "loss": 1.183, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "grad_norm": 1.1353776454925537, | |
| "learning_rate": 3.738148024616863e-06, | |
| "loss": 0.913, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.0028164386749268, | |
| "learning_rate": 3.7112686988323353e-06, | |
| "loss": 1.0281, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.0187978744506836, | |
| "learning_rate": 3.684464324416578e-06, | |
| "loss": 1.0613, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.0470021963119507, | |
| "learning_rate": 3.6577352208338015e-06, | |
| "loss": 1.0183, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.0930383205413818, | |
| "learning_rate": 3.6310817066511106e-06, | |
| "loss": 0.9686, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.9942764639854431, | |
| "learning_rate": 3.604504099534696e-06, | |
| "loss": 1.0748, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.0142755508422852, | |
| "learning_rate": 3.578002716246074e-06, | |
| "loss": 0.9741, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.2134594917297363, | |
| "learning_rate": 3.5515778726382967e-06, | |
| "loss": 0.9834, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.0330934524536133, | |
| "learning_rate": 3.525229883652177e-06, | |
| "loss": 1.0183, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 1.0006968975067139, | |
| "learning_rate": 3.4989590633125583e-06, | |
| "loss": 1.097, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "grad_norm": 0.9243653416633606, | |
| "learning_rate": 3.4727657247245607e-06, | |
| "loss": 1.1231, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.1118088960647583, | |
| "learning_rate": 3.446650180069837e-06, | |
| "loss": 0.9367, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.0078818798065186, | |
| "learning_rate": 3.4206127406028744e-06, | |
| "loss": 1.0813, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.9553260207176208, | |
| "learning_rate": 3.394653716647277e-06, | |
| "loss": 1.0241, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.0123486518859863, | |
| "learning_rate": 3.3687734175920505e-06, | |
| "loss": 1.067, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.172294020652771, | |
| "learning_rate": 3.342972151887941e-06, | |
| "loss": 0.896, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.0110704898834229, | |
| "learning_rate": 3.317250227043746e-06, | |
| "loss": 1.0728, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 1.2382794618606567, | |
| "learning_rate": 3.2916079496226407e-06, | |
| "loss": 0.943, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.9401190876960754, | |
| "learning_rate": 3.266045625238539e-06, | |
| "loss": 1.1266, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "grad_norm": 0.9894123673439026, | |
| "learning_rate": 3.2405635585524566e-06, | |
| "loss": 1.0754, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.978413999080658, | |
| "learning_rate": 3.21516205326885e-06, | |
| "loss": 1.0067, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.109924554824829, | |
| "learning_rate": 3.1898414121320277e-06, | |
| "loss": 1.0144, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.163025140762329, | |
| "learning_rate": 3.1646019369225277e-06, | |
| "loss": 0.9472, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.9735128283500671, | |
| "learning_rate": 3.1394439284535206e-06, | |
| "loss": 1.1454, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.95139080286026, | |
| "learning_rate": 3.114367686567228e-06, | |
| "loss": 0.91, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.9900702238082886, | |
| "learning_rate": 3.089373510131354e-06, | |
| "loss": 1.0956, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 1.0989785194396973, | |
| "learning_rate": 3.064461697035506e-06, | |
| "loss": 0.9886, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.9499761462211609, | |
| "learning_rate": 3.0396325441876627e-06, | |
| "loss": 1.0318, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.9807543158531189, | |
| "learning_rate": 3.0148863475106315e-06, | |
| "loss": 1.0785, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.9778667092323303, | |
| "learning_rate": 2.9902234019385056e-06, | |
| "loss": 1.0331, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.0859335660934448, | |
| "learning_rate": 2.9656440014131737e-06, | |
| "loss": 0.8759, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.0425362586975098, | |
| "learning_rate": 2.941148438880803e-06, | |
| "loss": 1.0943, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.9606037735939026, | |
| "learning_rate": 2.9167370062883403e-06, | |
| "loss": 1.0781, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.1606569290161133, | |
| "learning_rate": 2.8924099945800533e-06, | |
| "loss": 0.9311, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.962669312953949, | |
| "learning_rate": 2.8681676936940397e-06, | |
| "loss": 1.0928, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 0.9829181432723999, | |
| "learning_rate": 2.8440103925587904e-06, | |
| "loss": 1.0623, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.0849944353103638, | |
| "learning_rate": 2.8199383790897405e-06, | |
| "loss": 0.9099, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.0360709428787231, | |
| "learning_rate": 2.795951940185827e-06, | |
| "loss": 0.9296, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "grad_norm": 1.00126051902771, | |
| "learning_rate": 2.7720513617260857e-06, | |
| "loss": 1.1704, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.0162365436553955, | |
| "learning_rate": 2.748236928566238e-06, | |
| "loss": 0.9932, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.0815722942352295, | |
| "learning_rate": 2.7245089245352864e-06, | |
| "loss": 0.9279, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.9318661093711853, | |
| "learning_rate": 2.700867632432145e-06, | |
| "loss": 1.1146, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.9809348583221436, | |
| "learning_rate": 2.6773133340222677e-06, | |
| "loss": 1.0777, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.9920380115509033, | |
| "learning_rate": 2.6538463100342773e-06, | |
| "loss": 1.0181, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 1.0896059274673462, | |
| "learning_rate": 2.6304668401566334e-06, | |
| "loss": 0.9128, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.9949503540992737, | |
| "learning_rate": 2.607175203034299e-06, | |
| "loss": 0.9746, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.9890156388282776, | |
| "learning_rate": 2.5839716762654e-06, | |
| "loss": 1.1763, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "grad_norm": 0.9868981838226318, | |
| "learning_rate": 2.56085653639795e-06, | |
| "loss": 0.9838, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.343011736869812, | |
| "learning_rate": 2.5378300589265258e-06, | |
| "loss": 0.9213, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.1747832298278809, | |
| "learning_rate": 2.514892518288988e-06, | |
| "loss": 1.0731, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.0571181774139404, | |
| "learning_rate": 2.4920441878632273e-06, | |
| "loss": 0.9929, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.0311808586120605, | |
| "learning_rate": 2.469285339963892e-06, | |
| "loss": 0.9643, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.9623830914497375, | |
| "learning_rate": 2.4466162458391364e-06, | |
| "loss": 1.0802, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.0405855178833008, | |
| "learning_rate": 2.4240371756674063e-06, | |
| "loss": 1.0881, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.1836566925048828, | |
| "learning_rate": 2.401548398554213e-06, | |
| "loss": 0.9072, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 1.0282421112060547, | |
| "learning_rate": 2.379150182528909e-06, | |
| "loss": 0.9555, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.919743001461029, | |
| "learning_rate": 2.3568427945415163e-06, | |
| "loss": 1.0565, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "grad_norm": 0.9282224178314209, | |
| "learning_rate": 2.334626500459539e-06, | |
| "loss": 1.0876, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.1041455268859863, | |
| "learning_rate": 2.3125015650647798e-06, | |
| "loss": 0.9093, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.0091993808746338, | |
| "learning_rate": 2.290468252050204e-06, | |
| "loss": 1.0585, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.9351275563240051, | |
| "learning_rate": 2.26852682401679e-06, | |
| "loss": 0.9875, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.0261762142181396, | |
| "learning_rate": 2.246677542470388e-06, | |
| "loss": 1.0674, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.1478934288024902, | |
| "learning_rate": 2.224920667818622e-06, | |
| "loss": 0.9793, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.9829080700874329, | |
| "learning_rate": 2.2032564593677773e-06, | |
| "loss": 0.9689, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.9616503119468689, | |
| "learning_rate": 2.1816851753197023e-06, | |
| "loss": 0.9927, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 0.9927416443824768, | |
| "learning_rate": 2.1602070727687463e-06, | |
| "loss": 1.0848, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "grad_norm": 1.0184030532836914, | |
| "learning_rate": 2.1388224076986872e-06, | |
| "loss": 1.0522, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.9854580163955688, | |
| "learning_rate": 2.117531434979675e-06, | |
| "loss": 1.0267, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.9567153453826904, | |
| "learning_rate": 2.096334408365207e-06, | |
| "loss": 1.0587, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.1567630767822266, | |
| "learning_rate": 2.075231580489098e-06, | |
| "loss": 0.8727, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.0150424242019653, | |
| "learning_rate": 2.0542232028624585e-06, | |
| "loss": 1.0043, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.9852665066719055, | |
| "learning_rate": 2.033309525870717e-06, | |
| "loss": 1.0168, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.9206901788711548, | |
| "learning_rate": 2.0124907987706243e-06, | |
| "loss": 1.096, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 1.1059908866882324, | |
| "learning_rate": 1.991767269687278e-06, | |
| "loss": 0.9925, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.9824846982955933, | |
| "learning_rate": 1.971139185611176e-06, | |
| "loss": 1.0181, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "grad_norm": 0.9966596364974976, | |
| "learning_rate": 1.9506067923952676e-06, | |
| "loss": 1.0824, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.023253083229065, | |
| "learning_rate": 1.930170334752025e-06, | |
| "loss": 1.0375, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.1030439138412476, | |
| "learning_rate": 1.9098300562505266e-06, | |
| "loss": 0.9924, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.9880173206329346, | |
| "learning_rate": 1.8895861993135444e-06, | |
| "loss": 0.9677, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.9782374501228333, | |
| "learning_rate": 1.8694390052146737e-06, | |
| "loss": 1.1012, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.083292841911316, | |
| "learning_rate": 1.8493887140754462e-06, | |
| "loss": 0.8907, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.9346583485603333, | |
| "learning_rate": 1.8294355648624607e-06, | |
| "loss": 1.0905, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.9697967767715454, | |
| "learning_rate": 1.8095797953845507e-06, | |
| "loss": 1.0736, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.0436257123947144, | |
| "learning_rate": 1.789821642289945e-06, | |
| "loss": 0.9198, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 1.0333011150360107, | |
| "learning_rate": 1.7701613410634367e-06, | |
| "loss": 1.0058, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "grad_norm": 0.9973543286323547, | |
| "learning_rate": 1.750599126023591e-06, | |
| "loss": 1.1536, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.978995680809021, | |
| "learning_rate": 1.731135230319948e-06, | |
| "loss": 0.999, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.1003906726837158, | |
| "learning_rate": 1.7117698859302357e-06, | |
| "loss": 0.9195, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.9167702794075012, | |
| "learning_rate": 1.692503323657617e-06, | |
| "loss": 1.0834, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.0143738985061646, | |
| "learning_rate": 1.6733357731279375e-06, | |
| "loss": 1.0864, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 0.9755336046218872, | |
| "learning_rate": 1.6542674627869738e-06, | |
| "loss": 1.0118, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.1794527769088745, | |
| "learning_rate": 1.6352986198977327e-06, | |
| "loss": 0.9065, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.0122777223587036, | |
| "learning_rate": 1.6164294705377292e-06, | |
| "loss": 1.0883, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.0221326351165771, | |
| "learning_rate": 1.5976602395962892e-06, | |
| "loss": 1.0812, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "grad_norm": 1.0288370847702026, | |
| "learning_rate": 1.5789911507718824e-06, | |
| "loss": 1.0038, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.111275315284729, | |
| "learning_rate": 1.560422426569449e-06, | |
| "loss": 0.8388, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.9718247652053833, | |
| "learning_rate": 1.5419542882977367e-06, | |
| "loss": 1.0562, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.8943420648574829, | |
| "learning_rate": 1.523586956066686e-06, | |
| "loss": 1.0755, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.117976188659668, | |
| "learning_rate": 1.5053206487847916e-06, | |
| "loss": 0.9727, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.9306220412254333, | |
| "learning_rate": 1.4871555841564889e-06, | |
| "loss": 1.0657, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.9987902641296387, | |
| "learning_rate": 1.4690919786795766e-06, | |
| "loss": 1.0742, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.1273653507232666, | |
| "learning_rate": 1.4511300476426227e-06, | |
| "loss": 1.0238, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 1.059403896331787, | |
| "learning_rate": 1.433270005122399e-06, | |
| "loss": 0.9304, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.8933417797088623, | |
| "learning_rate": 1.4155120639813392e-06, | |
| "loss": 1.0736, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "grad_norm": 0.9639893770217896, | |
| "learning_rate": 1.3978564358649926e-06, | |
| "loss": 1.0735, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.0888656377792358, | |
| "learning_rate": 1.3803033311995072e-06, | |
| "loss": 1.026, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.1234164237976074, | |
| "learning_rate": 1.3628529591891181e-06, | |
| "loss": 0.9933, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.9895060062408447, | |
| "learning_rate": 1.345505527813652e-06, | |
| "loss": 1.0127, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.9872562885284424, | |
| "learning_rate": 1.3282612438260578e-06, | |
| "loss": 1.0825, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.1106939315795898, | |
| "learning_rate": 1.311120312749935e-06, | |
| "loss": 1.0116, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.0756417512893677, | |
| "learning_rate": 1.2940829388770837e-06, | |
| "loss": 0.8872, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.9590853452682495, | |
| "learning_rate": 1.2771493252650723e-06, | |
| "loss": 1.0034, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 0.9526690244674683, | |
| "learning_rate": 1.2603196737348211e-06, | |
| "loss": 1.0512, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "grad_norm": 1.0245425701141357, | |
| "learning_rate": 1.2435941848681864e-06, | |
| "loss": 1.0665, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.9663554430007935, | |
| "learning_rate": 1.2269730580055806e-06, | |
| "loss": 1.0159, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.9696429371833801, | |
| "learning_rate": 1.2104564912435924e-06, | |
| "loss": 1.0869, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.1331549882888794, | |
| "learning_rate": 1.19404468143262e-06, | |
| "loss": 1.0194, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.9671963453292847, | |
| "learning_rate": 1.1777378241745385e-06, | |
| "loss": 0.9755, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.9862372875213623, | |
| "learning_rate": 1.1615361138203574e-06, | |
| "loss": 1.11, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.8697640299797058, | |
| "learning_rate": 1.1454397434679022e-06, | |
| "loss": 1.0162, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 1.0795180797576904, | |
| "learning_rate": 1.1294489049595247e-06, | |
| "loss": 0.9859, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.9713975191116333, | |
| "learning_rate": 1.1135637888798101e-06, | |
| "loss": 1.007, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "grad_norm": 0.9347055554389954, | |
| "learning_rate": 1.0977845845533009e-06, | |
| "loss": 1.0648, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.9483699798583984, | |
| "learning_rate": 1.0821114800422482e-06, | |
| "loss": 0.9969, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.1327446699142456, | |
| "learning_rate": 1.066544662144371e-06, | |
| "loss": 0.9817, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.972658097743988, | |
| "learning_rate": 1.0510843163906148e-06, | |
| "loss": 0.9983, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.9550564289093018, | |
| "learning_rate": 1.0357306270429623e-06, | |
| "loss": 1.1077, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.0752533674240112, | |
| "learning_rate": 1.020483777092226e-06, | |
| "loss": 0.937, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.961311399936676, | |
| "learning_rate": 1.0053439482558602e-06, | |
| "loss": 1.0783, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.9712187051773071, | |
| "learning_rate": 9.903113209758098e-07, | |
| "loss": 1.0669, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.0024086236953735, | |
| "learning_rate": 9.753860744163524e-07, | |
| "loss": 1.0292, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 1.1103029251098633, | |
| "learning_rate": 9.605683864619574e-07, | |
| "loss": 0.992, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "grad_norm": 0.9723556637763977, | |
| "learning_rate": 9.458584337151811e-07, | |
| "loss": 1.0766, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.0514123439788818, | |
| "learning_rate": 9.312563914945461e-07, | |
| "loss": 1.0176, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.1007330417633057, | |
| "learning_rate": 9.167624338324599e-07, | |
| "loss": 0.8982, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.8976367115974426, | |
| "learning_rate": 9.023767334731426e-07, | |
| "loss": 1.08, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.9477424621582031, | |
| "learning_rate": 8.880994618705574e-07, | |
| "loss": 1.0746, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.9403738379478455, | |
| "learning_rate": 8.739307891863813e-07, | |
| "loss": 1.0109, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.0609928369522095, | |
| "learning_rate": 8.598708842879688e-07, | |
| "loss": 0.9224, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.9886844754219055, | |
| "learning_rate": 8.459199147463371e-07, | |
| "loss": 1.0723, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 0.9454167485237122, | |
| "learning_rate": 8.320780468341761e-07, | |
| "loss": 1.0698, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "grad_norm": 1.015596866607666, | |
| "learning_rate": 8.183454455238638e-07, | |
| "loss": 1.0672, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.035571575164795, | |
| "learning_rate": 8.047222744854943e-07, | |
| "loss": 0.9492, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.0910913944244385, | |
| "learning_rate": 7.912086960849374e-07, | |
| "loss": 0.8845, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.8773934245109558, | |
| "learning_rate": 7.778048713818975e-07, | |
| "loss": 1.0914, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.9601214528083801, | |
| "learning_rate": 7.645109601279921e-07, | |
| "loss": 1.0584, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.0622022151947021, | |
| "learning_rate": 7.513271207648531e-07, | |
| "loss": 0.9366, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.9744629263877869, | |
| "learning_rate": 7.382535104222366e-07, | |
| "loss": 1.0634, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.9703547954559326, | |
| "learning_rate": 7.252902849161436e-07, | |
| "loss": 1.0692, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 1.0488845109939575, | |
| "learning_rate": 7.124375987469767e-07, | |
| "loss": 0.8355, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.8898780345916748, | |
| "learning_rate": 6.996956050976878e-07, | |
| "loss": 1.1218, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "grad_norm": 0.9564811587333679, | |
| "learning_rate": 6.870644558319528e-07, | |
| "loss": 1.0741, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.9528207182884216, | |
| "learning_rate": 6.745443014923658e-07, | |
| "loss": 1.0141, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.035481572151184, | |
| "learning_rate": 6.621352912986468e-07, | |
| "loss": 0.9766, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.9905387163162231, | |
| "learning_rate": 6.498375731458529e-07, | |
| "loss": 1.0135, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.9403073787689209, | |
| "learning_rate": 6.37651293602628e-07, | |
| "loss": 1.0598, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.9994465112686157, | |
| "learning_rate": 6.255765979094519e-07, | |
| "loss": 1.0681, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.1080148220062256, | |
| "learning_rate": 6.136136299768991e-07, | |
| "loss": 0.9418, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.9642373323440552, | |
| "learning_rate": 6.017625323839415e-07, | |
| "loss": 0.9507, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 0.9195955395698547, | |
| "learning_rate": 5.900234463762367e-07, | |
| "loss": 1.1663, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "grad_norm": 1.0112338066101074, | |
| "learning_rate": 5.783965118644441e-07, | |
| "loss": 0.9215, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.0380650758743286, | |
| "learning_rate": 5.668818674225684e-07, | |
| "loss": 0.9195, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.9641549587249756, | |
| "learning_rate": 5.554796502862958e-07, | |
| "loss": 1.0519, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.9936041831970215, | |
| "learning_rate": 5.441899963513631e-07, | |
| "loss": 1.0816, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.1456196308135986, | |
| "learning_rate": 5.330130401719413e-07, | |
| "loss": 0.895, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.0453975200653076, | |
| "learning_rate": 5.219489149590251e-07, | |
| "loss": 1.1018, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.9106210470199585, | |
| "learning_rate": 5.109977525788512e-07, | |
| "loss": 1.0022, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 1.1100226640701294, | |
| "learning_rate": 5.001596835513256e-07, | |
| "loss": 1.0069, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.9294660687446594, | |
| "learning_rate": 4.894348370484648e-07, | |
| "loss": 0.9952, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "grad_norm": 0.9274271726608276, | |
| "learning_rate": 4.788233408928588e-07, | |
| "loss": 1.0738, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.0076048374176025, | |
| "learning_rate": 4.6832532155614895e-07, | |
| "loss": 0.9935, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.0656659603118896, | |
| "learning_rate": 4.5794090415751666e-07, | |
| "loss": 0.9714, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.9729741215705872, | |
| "learning_rate": 4.4767021246219566e-07, | |
| "loss": 1.0757, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.9909480810165405, | |
| "learning_rate": 4.3751336887999597e-07, | |
| "loss": 0.9922, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.027937889099121, | |
| "learning_rate": 4.27470494463843e-07, | |
| "loss": 0.9988, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.9245307445526123, | |
| "learning_rate": 4.1754170890833777e-07, | |
| "loss": 0.9783, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.054545283317566, | |
| "learning_rate": 4.077271305483321e-07, | |
| "loss": 1.0797, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 0.9386679530143738, | |
| "learning_rate": 3.980268763575079e-07, | |
| "loss": 1.0087, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.0545976161956787, | |
| "learning_rate": 3.8844106194699696e-07, | |
| "loss": 1.0633, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "grad_norm": 1.162606120109558, | |
| "learning_rate": 3.7896980156399533e-07, | |
| "loss": 0.987, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.9432545900344849, | |
| "learning_rate": 3.6961320809039914e-07, | |
| "loss": 1.0143, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.8689368963241577, | |
| "learning_rate": 3.603713930414676e-07, | |
| "loss": 1.0705, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.0489226579666138, | |
| "learning_rate": 3.5124446656448654e-07, | |
| "loss": 0.8796, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.0109175443649292, | |
| "learning_rate": 3.42232537437458e-07, | |
| "loss": 1.036, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.943357527256012, | |
| "learning_rate": 3.33335713067805e-07, | |
| "loss": 1.0574, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.022092342376709, | |
| "learning_rate": 3.245540994910934e-07, | |
| "loss": 1.0293, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 1.0993645191192627, | |
| "learning_rate": 3.158878013697586e-07, | |
| "loss": 0.9948, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.997831404209137, | |
| "learning_rate": 3.073369219918698e-07, | |
| "loss": 1.0734, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "grad_norm": 0.949984073638916, | |
| "learning_rate": 2.989015632698944e-07, | |
| "loss": 1.0832, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.9783075451850891, | |
| "learning_rate": 2.905818257394799e-07, | |
| "loss": 0.9935, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.0399396419525146, | |
| "learning_rate": 2.8237780855825957e-07, | |
| "loss": 0.8859, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.8728951215744019, | |
| "learning_rate": 2.742896095046732e-07, | |
| "loss": 1.0833, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.0082359313964844, | |
| "learning_rate": 2.6631732497679363e-07, | |
| "loss": 1.0854, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.0016460418701172, | |
| "learning_rate": 2.584610499911833e-07, | |
| "loss": 0.9685, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.0976165533065796, | |
| "learning_rate": 2.507208781817638e-07, | |
| "loss": 0.9965, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.9641050100326538, | |
| "learning_rate": 2.4309690179869503e-07, | |
| "loss": 1.0663, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 0.9330260157585144, | |
| "learning_rate": 2.355892117072789e-07, | |
| "loss": 1.0114, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "grad_norm": 1.1164205074310303, | |
| "learning_rate": 2.2819789738687482e-07, | |
| "loss": 0.9318, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.9885598421096802, | |
| "learning_rate": 2.2092304692983402e-07, | |
| "loss": 1.171, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.932279646396637, | |
| "learning_rate": 2.1376474704044693e-07, | |
| "loss": 0.9213, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.1109699010849, | |
| "learning_rate": 2.067230830339184e-07, | |
| "loss": 0.9928, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.9438067674636841, | |
| "learning_rate": 1.9979813883533762e-07, | |
| "loss": 1.0135, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.9470593929290771, | |
| "learning_rate": 1.929899969786897e-07, | |
| "loss": 1.0703, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.0024571418762207, | |
| "learning_rate": 1.8629873860586567e-07, | |
| "loss": 1.0782, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.0540047883987427, | |
| "learning_rate": 1.7972444346569752e-07, | |
| "loss": 0.9218, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.9383503794670105, | |
| "learning_rate": 1.7326718991300563e-07, | |
| "loss": 0.9661, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 0.8709021806716919, | |
| "learning_rate": 1.6692705490766958e-07, | |
| "loss": 1.1834, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "grad_norm": 1.0119833946228027, | |
| "learning_rate": 1.6070411401370335e-07, | |
| "loss": 0.9163, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.1062003374099731, | |
| "learning_rate": 1.5459844139836476e-07, | |
| "loss": 0.9145, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.9511232376098633, | |
| "learning_rate": 1.4861010983126202e-07, | |
| "loss": 1.0561, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.9989222288131714, | |
| "learning_rate": 1.4273919068349184e-07, | |
| "loss": 1.0758, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.9592758417129517, | |
| "learning_rate": 1.3698575392678492e-07, | |
| "loss": 0.9915, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.066810965538025, | |
| "learning_rate": 1.3134986813267968e-07, | |
| "loss": 0.928, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.9115373492240906, | |
| "learning_rate": 1.258316004716953e-07, | |
| "loss": 1.0767, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 1.0231889486312866, | |
| "learning_rate": 1.2043101671253553e-07, | |
| "loss": 0.8751, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.9903556108474731, | |
| "learning_rate": 1.1514818122130844e-07, | |
| "loss": 1.0211, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "grad_norm": 0.9360074400901794, | |
| "learning_rate": 1.0998315696075123e-07, | |
| "loss": 1.0674, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.9390724301338196, | |
| "learning_rate": 1.0493600548948879e-07, | |
| "loss": 0.9986, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.1011719703674316, | |
| "learning_rate": 1.0000678696129307e-07, | |
| "loss": 0.9687, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.017079472541809, | |
| "learning_rate": 9.519556012436815e-08, | |
| "loss": 1.066, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.9542065262794495, | |
| "learning_rate": 9.0502382320653e-08, | |
| "loss": 0.9943, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.9380577206611633, | |
| "learning_rate": 8.592730948513205e-08, | |
| "loss": 1.0688, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.8784124851226807, | |
| "learning_rate": 8.147039614517571e-08, | |
| "loss": 1.0244, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.0768616199493408, | |
| "learning_rate": 7.71316954198853e-08, | |
| "loss": 1.0053, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.9789505004882812, | |
| "learning_rate": 7.291125901946027e-08, | |
| "loss": 1.0111, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 0.9437596797943115, | |
| "learning_rate": 6.880913724458538e-08, | |
| "loss": 1.059, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "grad_norm": 1.027599811553955, | |
| "learning_rate": 6.482537898582886e-08, | |
| "loss": 0.9881, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.0822994709014893, | |
| "learning_rate": 6.096003172305742e-08, | |
| "loss": 0.9175, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.8413906097412109, | |
| "learning_rate": 5.721314152487556e-08, | |
| "loss": 1.0772, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.0318522453308105, | |
| "learning_rate": 5.3584753048073756e-08, | |
| "loss": 0.8773, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.9622299075126648, | |
| "learning_rate": 5.007490953709227e-08, | |
| "loss": 1.0422, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.9367170929908752, | |
| "learning_rate": 4.6683652823513725e-08, | |
| "loss": 1.0614, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.9283831715583801, | |
| "learning_rate": 4.3411023325560245e-08, | |
| "loss": 1.0228, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 1.0740995407104492, | |
| "learning_rate": 4.025706004760932e-08, | |
| "loss": 0.9969, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.9685555696487427, | |
| "learning_rate": 3.7221800579735346e-08, | |
| "loss": 1.0949, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "grad_norm": 0.952633261680603, | |
| "learning_rate": 3.430528109725439e-08, | |
| "loss": 1.074, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.9568140506744385, | |
| "learning_rate": 3.150753636029902e-08, | |
| "loss": 1.0088, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.9952855706214905, | |
| "learning_rate": 2.8828599713398575e-08, | |
| "loss": 1.0022, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.982025682926178, | |
| "learning_rate": 2.6268503085089547e-08, | |
| "loss": 0.9851, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.9688462018966675, | |
| "learning_rate": 2.3827276987524738e-08, | |
| "loss": 1.0815, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.0790618658065796, | |
| "learning_rate": 2.1504950516118007e-08, | |
| "loss": 0.8817, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.9968244433403015, | |
| "learning_rate": 1.9301551349195648e-08, | |
| "loss": 1.0805, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.9625927805900574, | |
| "learning_rate": 1.721710574766333e-08, | |
| "loss": 1.0762, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 0.9702088832855225, | |
| "learning_rate": 1.5251638554694137e-08, | |
| "loss": 1.0013, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "grad_norm": 1.01020085811615, | |
| "learning_rate": 1.340517319543877e-08, | |
| "loss": 1.014, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.9813044667243958, | |
| "learning_rate": 1.1677731676733584e-08, | |
| "loss": 1.0617, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.9475674629211426, | |
| "learning_rate": 1.0069334586854106e-08, | |
| "loss": 1.0161, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.0829815864562988, | |
| "learning_rate": 8.580001095253032e-09, | |
| "loss": 0.8973, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.9402217864990234, | |
| "learning_rate": 7.209748952347051e-09, | |
| "loss": 1.0152, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.0154670476913452, | |
| "learning_rate": 5.958594489295921e-09, | |
| "loss": 1.0904, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.016912817955017, | |
| "learning_rate": 4.826552617807067e-09, | |
| "loss": 0.9959, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.023099422454834, | |
| "learning_rate": 3.8136368299668266e-09, | |
| "loss": 0.9513, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.9503531455993652, | |
| "learning_rate": 2.9198591980705847e-09, | |
| "loss": 1.0682, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 0.9064751863479614, | |
| "learning_rate": 2.145230374481777e-09, | |
| "loss": 1.1034, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "grad_norm": 1.104668140411377, | |
| "learning_rate": 1.4897595915053242e-09, | |
| "loss": 1.0063, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.8932612538337708, | |
| "learning_rate": 9.534546612810502e-10, | |
| "loss": 0.9229, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.9584989547729492, | |
| "learning_rate": 5.363219756837624e-10, | |
| "loss": 1.1634, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.9628061056137085, | |
| "learning_rate": 2.3836650624997627e-10, | |
| "loss": 0.9712, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 1.072252631187439, | |
| "learning_rate": 5.959180412129506e-11, | |
| "loss": 1.0077, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.8910067081451416, | |
| "learning_rate": 0.0, | |
| "loss": 1.0284, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 939, | |
| "total_flos": 0.0, | |
| "train_loss": 1.108175155326835, | |
| "train_runtime": 5394.2035, | |
| "train_samples_per_second": 201.41, | |
| "train_steps_per_second": 0.174 | |
| } | |
| ], | |
| "logging_steps": 1.0, | |
| "max_steps": 939, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": false, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 6, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
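
The trace above is a complete `trainer_state.json` as written by the Hugging Face `Trainer`: a single epoch of 939 steps with per-step logging (`logging_steps: 1.0`), checkpoints every 100 steps (`save_steps: 100`), and a final run-level summary record (`train_loss`, `train_runtime`, throughput) appended to `log_history`. Two hedged sketches follow; both assume the JSON has been saved locally as `trainer_state.json`, and the file path and plotting choices are illustrative, not part of the original log.

First, a minimal sketch for loading the log and plotting the per-step loss against the learning-rate schedule, using only the standard-library `json` module and `matplotlib`:

```python
# Parse trainer_state.json and plot per-step training loss together
# with the learning-rate schedule on a second y-axis.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # assumed local copy of the log above
    state = json.load(f)

# The last log_history record is a run-level summary without a "loss"
# field, so filter on its presence to keep only the per-step entries.
records = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in records]
losses = [e["loss"] for e in records]
lrs = [e["learning_rate"] for e in records]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, color="tab:blue", label="train loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

ax_lr = ax_loss.twinx()  # second axis: the LR spans 2e-5 down to 0
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("training_curve.png")
```

Second, the logged rates are consistent with linear warmup over 29 steps to a peak of 2e-5, followed by cosine decay to zero at step 939 (the stock cosine-with-warmup schedule). The peak LR and warmup length are inferred from the log itself, since the training arguments are not part of this file. A sketch that reproduces the logged values:

```python
import math

PEAK_LR = 2e-5      # inferred: the step-1 LR equals 2e-5 / 29
WARMUP_STEPS = 29   # inferred: the LR grows linearly through step 29
TOTAL_STEPS = 939   # "max_steps" above

def lr_at(step: int) -> float:
    """Cosine schedule with linear warmup, matching the logged values."""
    if step <= WARMUP_STEPS:
        return PEAK_LR * step / WARMUP_STEPS
    progress = (step - WARMUP_STEPS) / (TOTAL_STEPS - WARMUP_STEPS)
    return PEAK_LR * 0.5 * (1.0 + math.cos(math.pi * progress))

# Spot-checks against the log: step 700 -> ~3.215e-06,
# step 938 -> ~5.96e-11, step 939 -> 0.0.
print(lr_at(700), lr_at(938), lr_at(939))
```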