{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 7.0,
  "eval_steps": 500,
  "global_step": 231,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.030303030303030304,
      "grad_norm": 6.312150870591461,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.0115,
      "step": 1
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 6.475694856091368,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.0286,
      "step": 2
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 6.510110526436135,
      "learning_rate": 2.5e-06,
      "loss": 1.0389,
      "step": 3
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 5.981791395592498,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0259,
      "step": 4
    },
    {
      "epoch": 0.15151515151515152,
      "grad_norm": 4.53026982334595,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.9918,
      "step": 5
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 2.8104793581280285,
      "learning_rate": 5e-06,
      "loss": 0.9379,
      "step": 6
    },
    {
      "epoch": 0.21212121212121213,
      "grad_norm": 2.567100887895647,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.9482,
      "step": 7
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 3.892397384715512,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.9188,
      "step": 8
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 4.142874339222047,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9283,
      "step": 9
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 4.138619036300646,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.9153,
      "step": 10
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 3.373497058955061,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.8984,
      "step": 11
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 2.8583835585294537,
      "learning_rate": 1e-05,
      "loss": 0.8745,
      "step": 12
    },
    {
      "epoch": 0.3939393939393939,
      "grad_norm": 2.0867771671497706,
      "learning_rate": 1.0833333333333334e-05,
      "loss": 0.8407,
      "step": 13
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 1.4253099021132698,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 0.8072,
      "step": 14
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 1.5431649525602196,
      "learning_rate": 1.25e-05,
      "loss": 0.8059,
      "step": 15
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 1.2817611290592914,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.7992,
      "step": 16
    },
    {
      "epoch": 0.5151515151515151,
      "grad_norm": 1.1356040619726908,
      "learning_rate": 1.416666666666667e-05,
      "loss": 0.781,
      "step": 17
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 1.1421250437266164,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.7467,
      "step": 18
    },
    {
      "epoch": 0.5757575757575758,
      "grad_norm": 1.0425455417206984,
      "learning_rate": 1.5833333333333333e-05,
      "loss": 0.7772,
      "step": 19
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 0.9574395482369085,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.7322,
      "step": 20
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 1.0470442958944162,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 0.7542,
      "step": 21
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 1.0529632063486736,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 0.7272,
      "step": 22
    },
    {
      "epoch": 0.696969696969697,
      "grad_norm": 1.169220818599545,
      "learning_rate": 1.916666666666667e-05,
      "loss": 0.7264,
      "step": 23
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.9407356437242442,
      "learning_rate": 2e-05,
      "loss": 0.7198,
      "step": 24
    },
    {
      "epoch": 0.7575757575757576,
      "grad_norm": 1.0131670420617127,
      "learning_rate": 1.999884834944106e-05,
      "loss": 0.7314,
      "step": 25
    },
    {
      "epoch": 0.7878787878787878,
      "grad_norm": 0.9558934157548457,
      "learning_rate": 1.9995393663024054e-05,
      "loss": 0.7092,
      "step": 26
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.7862337023546967,
      "learning_rate": 1.9989636736467278e-05,
      "loss": 0.7022,
      "step": 27
    },
    {
      "epoch": 0.8484848484848485,
      "grad_norm": 0.8015449395501483,
      "learning_rate": 1.9981578895764272e-05,
      "loss": 0.7131,
      "step": 28
    },
    {
      "epoch": 0.8787878787878788,
      "grad_norm": 0.8604817636158697,
      "learning_rate": 1.9971221996878395e-05,
      "loss": 0.7071,
      "step": 29
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 0.7854145700441739,
      "learning_rate": 1.9958568425315316e-05,
      "loss": 0.696,
      "step": 30
    },
    {
      "epoch": 0.9393939393939394,
      "grad_norm": 0.6225530941896312,
      "learning_rate": 1.9943621095573588e-05,
      "loss": 0.6941,
      "step": 31
    },
    {
      "epoch": 0.9696969696969697,
      "grad_norm": 0.7778316880329571,
      "learning_rate": 1.9926383450473344e-05,
      "loss": 0.6933,
      "step": 32
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.5277454875573591,
      "learning_rate": 1.9906859460363307e-05,
      "loss": 0.6915,
      "step": 33
    },
    {
      "epoch": 1.0303030303030303,
      "grad_norm": 0.5805460635972389,
      "learning_rate": 1.9885053622206305e-05,
      "loss": 0.6447,
      "step": 34
    },
    {
      "epoch": 1.0606060606060606,
      "grad_norm": 0.6019817869943789,
      "learning_rate": 1.986097095854347e-05,
      "loss": 0.6683,
      "step": 35
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.5088210340957223,
      "learning_rate": 1.9834617016337424e-05,
      "loss": 0.6472,
      "step": 36
    },
    {
      "epoch": 1.121212121212121,
      "grad_norm": 0.5962209257067279,
      "learning_rate": 1.9805997865694616e-05,
      "loss": 0.6632,
      "step": 37
    },
    {
      "epoch": 1.1515151515151516,
      "grad_norm": 0.5831832792689964,
      "learning_rate": 1.9775120098467212e-05,
      "loss": 0.6554,
      "step": 38
    },
    {
      "epoch": 1.1818181818181819,
      "grad_norm": 0.5817683452118627,
      "learning_rate": 1.9741990826734793e-05,
      "loss": 0.6336,
      "step": 39
    },
    {
      "epoch": 1.2121212121212122,
      "grad_norm": 0.5669925733078411,
      "learning_rate": 1.970661768116622e-05,
      "loss": 0.6408,
      "step": 40
    },
    {
      "epoch": 1.2424242424242424,
      "grad_norm": 0.6105774979030593,
      "learning_rate": 1.9669008809262064e-05,
      "loss": 0.6303,
      "step": 41
    },
    {
      "epoch": 1.2727272727272727,
      "grad_norm": 0.6142577901965711,
      "learning_rate": 1.9629172873477995e-05,
      "loss": 0.6398,
      "step": 42
    },
    {
      "epoch": 1.303030303030303,
      "grad_norm": 0.5337799657097911,
      "learning_rate": 1.9587119049229558e-05,
      "loss": 0.6286,
      "step": 43
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.5572020819014635,
      "learning_rate": 1.954285702277879e-05,
      "loss": 0.6402,
      "step": 44
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 0.4872837857719787,
      "learning_rate": 1.9496396989003195e-05,
      "loss": 0.6241,
      "step": 45
    },
    {
      "epoch": 1.393939393939394,
      "grad_norm": 0.5181010202053239,
      "learning_rate": 1.944774964904754e-05,
      "loss": 0.6293,
      "step": 46
    },
    {
      "epoch": 1.4242424242424243,
      "grad_norm": 0.5967191523625646,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.6316,
      "step": 47
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 0.5703718513429342,
      "learning_rate": 1.9343938371606714e-05,
      "loss": 0.6256,
      "step": 48
    },
    {
      "epoch": 1.4848484848484849,
      "grad_norm": 0.6541547573166652,
      "learning_rate": 1.9288798344984673e-05,
      "loss": 0.6123,
      "step": 49
    },
    {
      "epoch": 1.5151515151515151,
      "grad_norm": 0.6609780061398919,
      "learning_rate": 1.9231518828401458e-05,
      "loss": 0.6221,
      "step": 50
    },
    {
      "epoch": 1.5454545454545454,
      "grad_norm": 0.6894966357223148,
      "learning_rate": 1.917211301505453e-05,
      "loss": 0.6248,
      "step": 51
    },
    {
      "epoch": 1.5757575757575757,
      "grad_norm": 0.5939539044353149,
      "learning_rate": 1.911059458789152e-05,
      "loss": 0.6407,
      "step": 52
    },
    {
      "epoch": 1.606060606060606,
      "grad_norm": 0.7038110533728202,
      "learning_rate": 1.9046977716458627e-05,
      "loss": 0.6184,
      "step": 53
    },
    {
      "epoch": 1.6363636363636362,
      "grad_norm": 0.62732059189463,
      "learning_rate": 1.8981277053636963e-05,
      "loss": 0.6129,
      "step": 54
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 0.5427046281169445,
      "learning_rate": 1.891350773226754e-05,
      "loss": 0.6179,
      "step": 55
    },
    {
      "epoch": 1.696969696969697,
      "grad_norm": 0.6194003921539317,
      "learning_rate": 1.8843685361665724e-05,
      "loss": 0.6113,
      "step": 56
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 0.6410974021298932,
      "learning_rate": 1.8771826024025944e-05,
      "loss": 0.6173,
      "step": 57
    },
    {
      "epoch": 1.7575757575757576,
      "grad_norm": 0.5779009695922482,
      "learning_rate": 1.8697946270717468e-05,
      "loss": 0.617,
      "step": 58
    },
    {
      "epoch": 1.7878787878787878,
      "grad_norm": 0.5147582250315594,
      "learning_rate": 1.8622063118472135e-05,
      "loss": 0.6085,
      "step": 59
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 0.5671710300061014,
      "learning_rate": 1.8544194045464888e-05,
      "loss": 0.6203,
      "step": 60
    },
    {
      "epoch": 1.8484848484848486,
      "grad_norm": 0.7941211783306206,
      "learning_rate": 1.8464356987288012e-05,
      "loss": 0.6146,
      "step": 61
    },
    {
      "epoch": 1.878787878787879,
      "grad_norm": 0.7506842382530591,
      "learning_rate": 1.8382570332820045e-05,
      "loss": 0.6063,
      "step": 62
    },
    {
      "epoch": 1.9090909090909092,
      "grad_norm": 0.5258601890372588,
      "learning_rate": 1.8298852919990254e-05,
      "loss": 0.6301,
      "step": 63
    },
    {
      "epoch": 1.9393939393939394,
      "grad_norm": 0.8268877793831794,
      "learning_rate": 1.821322403143969e-05,
      "loss": 0.6001,
      "step": 64
    },
    {
      "epoch": 1.9696969696969697,
      "grad_norm": 0.5581536666536359,
      "learning_rate": 1.812570339007983e-05,
      "loss": 0.6119,
      "step": 65
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.600234570523279,
      "learning_rate": 1.8036311154549783e-05,
      "loss": 0.6099,
      "step": 66
    },
    {
      "epoch": 2.0303030303030303,
      "grad_norm": 0.7265544201056052,
      "learning_rate": 1.7945067914573147e-05,
      "loss": 0.5596,
      "step": 67
    },
    {
      "epoch": 2.0606060606060606,
      "grad_norm": 0.5248402931161873,
      "learning_rate": 1.7851994686215592e-05,
      "loss": 0.5585,
      "step": 68
    },
    {
      "epoch": 2.090909090909091,
      "grad_norm": 0.585676191853421,
      "learning_rate": 1.77571129070442e-05,
      "loss": 0.56,
      "step": 69
    },
    {
      "epoch": 2.121212121212121,
      "grad_norm": 0.7015489638418316,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.5631,
      "step": 70
    },
    {
      "epoch": 2.1515151515151514,
      "grad_norm": 0.5842445543540288,
      "learning_rate": 1.7562011524313187e-05,
      "loss": 0.5491,
      "step": 71
    },
    {
      "epoch": 2.1818181818181817,
      "grad_norm": 0.51243844443462,
      "learning_rate": 1.7461836858476858e-05,
      "loss": 0.5528,
      "step": 72
    },
    {
      "epoch": 2.212121212121212,
      "grad_norm": 0.7150352543491989,
      "learning_rate": 1.7359943506922775e-05,
      "loss": 0.545,
      "step": 73
    },
    {
      "epoch": 2.242424242424242,
      "grad_norm": 0.5141968817426051,
      "learning_rate": 1.725635493875799e-05,
      "loss": 0.547,
      "step": 74
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 0.585858728549435,
      "learning_rate": 1.7151095013548996e-05,
      "loss": 0.5488,
      "step": 75
    },
    {
      "epoch": 2.303030303030303,
      "grad_norm": 0.7866792966684663,
      "learning_rate": 1.7044187975826126e-05,
      "loss": 0.5519,
      "step": 76
    },
    {
      "epoch": 2.3333333333333335,
      "grad_norm": 0.5061107670338666,
      "learning_rate": 1.693565844949933e-05,
      "loss": 0.5499,
      "step": 77
    },
    {
      "epoch": 2.3636363636363638,
      "grad_norm": 0.6469169079163084,
      "learning_rate": 1.6825531432186545e-05,
      "loss": 0.5317,
      "step": 78
    },
    {
      "epoch": 2.393939393939394,
      "grad_norm": 0.6060546083978395,
      "learning_rate": 1.671383228945597e-05,
      "loss": 0.5524,
      "step": 79
    },
    {
      "epoch": 2.4242424242424243,
      "grad_norm": 0.4696097237633282,
      "learning_rate": 1.6600586748983642e-05,
      "loss": 0.5426,
      "step": 80
    },
    {
      "epoch": 2.4545454545454546,
      "grad_norm": 0.5399242385302314,
      "learning_rate": 1.648582089462756e-05,
      "loss": 0.5494,
      "step": 81
    },
    {
      "epoch": 2.484848484848485,
      "grad_norm": 0.48245852694053143,
      "learning_rate": 1.6369561160419783e-05,
      "loss": 0.5382,
      "step": 82
    },
    {
      "epoch": 2.515151515151515,
      "grad_norm": 0.47001514713374226,
      "learning_rate": 1.625183432447789e-05,
      "loss": 0.5564,
      "step": 83
    },
    {
      "epoch": 2.5454545454545454,
      "grad_norm": 0.5618528608673509,
      "learning_rate": 1.6132667502837164e-05,
      "loss": 0.5504,
      "step": 84
    },
    {
      "epoch": 2.5757575757575757,
      "grad_norm": 0.525585513155995,
      "learning_rate": 1.6012088143204953e-05,
      "loss": 0.5366,
      "step": 85
    },
    {
      "epoch": 2.606060606060606,
      "grad_norm": 0.47441334531625917,
      "learning_rate": 1.589012401863864e-05,
      "loss": 0.5382,
      "step": 86
    },
    {
      "epoch": 2.6363636363636362,
      "grad_norm": 0.6093467259870757,
      "learning_rate": 1.5766803221148676e-05,
      "loss": 0.5491,
      "step": 87
    },
    {
      "epoch": 2.6666666666666665,
      "grad_norm": 0.561336046190135,
      "learning_rate": 1.5642154155228124e-05,
      "loss": 0.5387,
      "step": 88
    },
    {
      "epoch": 2.6969696969696972,
      "grad_norm": 0.49872044831217777,
      "learning_rate": 1.5516205531310272e-05,
      "loss": 0.5374,
      "step": 89
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 0.45268268638401327,
      "learning_rate": 1.538898635915576e-05,
      "loss": 0.539,
      "step": 90
    },
    {
      "epoch": 2.757575757575758,
      "grad_norm": 0.49399347923936493,
      "learning_rate": 1.526052594117071e-05,
      "loss": 0.5483,
      "step": 91
    },
    {
      "epoch": 2.787878787878788,
      "grad_norm": 0.5051110919057764,
      "learning_rate": 1.513085386565758e-05,
      "loss": 0.524,
      "step": 92
    },
    {
      "epoch": 2.8181818181818183,
      "grad_norm": 0.5378366080363427,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.5512,
      "step": 93
    },
    {
      "epoch": 2.8484848484848486,
      "grad_norm": 0.46256558272521914,
      "learning_rate": 1.4867994483783485e-05,
      "loss": 0.5448,
      "step": 94
    },
    {
      "epoch": 2.878787878787879,
      "grad_norm": 0.5939458305415402,
      "learning_rate": 1.4734867721853341e-05,
      "loss": 0.541,
      "step": 95
    },
    {
      "epoch": 2.909090909090909,
      "grad_norm": 0.48938531706905364,
      "learning_rate": 1.4600650377311523e-05,
      "loss": 0.5473,
      "step": 96
    },
    {
      "epoch": 2.9393939393939394,
      "grad_norm": 0.5012717641433172,
      "learning_rate": 1.4465373364454001e-05,
      "loss": 0.5615,
      "step": 97
    },
    {
      "epoch": 2.9696969696969697,
      "grad_norm": 0.534292141394524,
      "learning_rate": 1.4329067841650274e-05,
      "loss": 0.535,
      "step": 98
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.534314937899243,
      "learning_rate": 1.4191765204166643e-05,
      "loss": 0.5424,
      "step": 99
    },
    {
      "epoch": 3.0303030303030303,
      "grad_norm": 0.5582906692438339,
      "learning_rate": 1.4053497076934948e-05,
      "loss": 0.4838,
      "step": 100
    },
    {
      "epoch": 3.0606060606060606,
      "grad_norm": 0.6022338728928069,
      "learning_rate": 1.3914295307268396e-05,
      "loss": 0.4822,
      "step": 101
    },
    {
      "epoch": 3.090909090909091,
      "grad_norm": 0.6062975278971561,
      "learning_rate": 1.3774191957526144e-05,
      "loss": 0.4866,
      "step": 102
    },
    {
      "epoch": 3.121212121212121,
      "grad_norm": 0.6965168962135356,
      "learning_rate": 1.3633219297728415e-05,
      "loss": 0.4875,
      "step": 103
    },
    {
      "epoch": 3.1515151515151514,
      "grad_norm": 0.5740971337746343,
      "learning_rate": 1.3491409798123687e-05,
      "loss": 0.4973,
      "step": 104
    },
    {
      "epoch": 3.1818181818181817,
      "grad_norm": 0.6037151795891081,
      "learning_rate": 1.3348796121709862e-05,
      "loss": 0.4838,
      "step": 105
    },
    {
      "epoch": 3.212121212121212,
      "grad_norm": 0.5209861578836725,
      "learning_rate": 1.3205411116710973e-05,
      "loss": 0.4742,
      "step": 106
    },
    {
      "epoch": 3.242424242424242,
      "grad_norm": 0.5456291719112359,
      "learning_rate": 1.3061287809011243e-05,
      "loss": 0.4888,
      "step": 107
    },
    {
      "epoch": 3.2727272727272725,
      "grad_norm": 0.48032197533105836,
      "learning_rate": 1.291645939454825e-05,
      "loss": 0.491,
      "step": 108
    },
    {
      "epoch": 3.303030303030303,
      "grad_norm": 0.4580756881783694,
      "learning_rate": 1.277095923166689e-05,
      "loss": 0.4843,
      "step": 109
    },
    {
      "epoch": 3.3333333333333335,
      "grad_norm": 0.46235698412068515,
      "learning_rate": 1.2624820833435939e-05,
      "loss": 0.4916,
      "step": 110
    },
    {
      "epoch": 3.3636363636363638,
      "grad_norm": 0.4489151537798375,
      "learning_rate": 1.2478077859929e-05,
      "loss": 0.4855,
      "step": 111
    },
    {
      "epoch": 3.393939393939394,
      "grad_norm": 0.4383101762947854,
      "learning_rate": 1.2330764110471567e-05,
      "loss": 0.4858,
      "step": 112
    },
    {
      "epoch": 3.4242424242424243,
      "grad_norm": 0.45451185564173385,
      "learning_rate": 1.2182913515856016e-05,
      "loss": 0.4755,
      "step": 113
    },
    {
      "epoch": 3.4545454545454546,
      "grad_norm": 0.42050469812801256,
      "learning_rate": 1.2034560130526341e-05,
      "loss": 0.4803,
      "step": 114
    },
    {
      "epoch": 3.484848484848485,
      "grad_norm": 0.45831672585375605,
      "learning_rate": 1.1885738124734359e-05,
      "loss": 0.4859,
      "step": 115
    },
    {
      "epoch": 3.515151515151515,
      "grad_norm": 0.4798493173213465,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4758,
      "step": 116
    },
    {
      "epoch": 3.5454545454545454,
      "grad_norm": 0.5047184645263595,
      "learning_rate": 1.1586825464562515e-05,
      "loss": 0.4733,
      "step": 117
    },
    {
      "epoch": 3.5757575757575757,
      "grad_norm": 0.43050751564999873,
      "learning_rate": 1.1436803658769082e-05,
      "loss": 0.4843,
      "step": 118
    },
    {
      "epoch": 3.606060606060606,
      "grad_norm": 0.507474061745988,
      "learning_rate": 1.1286450913828313e-05,
      "loss": 0.4752,
      "step": 119
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 0.5027468266837049,
      "learning_rate": 1.113580186050475e-05,
      "loss": 0.4642,
      "step": 120
    },
    {
      "epoch": 3.6666666666666665,
      "grad_norm": 0.5002534033302283,
      "learning_rate": 1.0984891197811686e-05,
      "loss": 0.475,
      "step": 121
    },
    {
      "epoch": 3.6969696969696972,
      "grad_norm": 0.5099369195818414,
      "learning_rate": 1.0833753685018935e-05,
      "loss": 0.487,
      "step": 122
    },
    {
      "epoch": 3.7272727272727275,
      "grad_norm": 0.48144394820768416,
      "learning_rate": 1.0682424133646712e-05,
      "loss": 0.4728,
      "step": 123
    },
    {
      "epoch": 3.757575757575758,
      "grad_norm": 0.48509841581231067,
      "learning_rate": 1.0530937399447496e-05,
      "loss": 0.4822,
      "step": 124
    },
    {
      "epoch": 3.787878787878788,
      "grad_norm": 0.5232355102786402,
      "learning_rate": 1.0379328374377715e-05,
      "loss": 0.4814,
      "step": 125
    },
    {
      "epoch": 3.8181818181818183,
      "grad_norm": 0.4522597876855734,
      "learning_rate": 1.0227631978561057e-05,
      "loss": 0.4752,
      "step": 126
    },
    {
      "epoch": 3.8484848484848486,
      "grad_norm": 0.48608519066110173,
      "learning_rate": 1.0075883152245334e-05,
      "loss": 0.4589,
      "step": 127
    },
    {
      "epoch": 3.878787878787879,
      "grad_norm": 0.4175574940160927,
      "learning_rate": 9.92411684775467e-06,
      "loss": 0.4797,
      "step": 128
    },
    {
      "epoch": 3.909090909090909,
      "grad_norm": 0.4081235310325298,
      "learning_rate": 9.772368021438943e-06,
      "loss": 0.4855,
      "step": 129
    },
    {
      "epoch": 3.9393939393939394,
      "grad_norm": 0.4672381986062692,
      "learning_rate": 9.620671625622287e-06,
      "loss": 0.4701,
      "step": 130
    },
    {
      "epoch": 3.9696969696969697,
      "grad_norm": 0.4013104550104878,
      "learning_rate": 9.469062600552509e-06,
      "loss": 0.4669,
      "step": 131
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.40311810148872756,
      "learning_rate": 9.317575866353293e-06,
      "loss": 0.4694,
      "step": 132
    },
    {
      "epoch": 4.03030303030303,
      "grad_norm": 0.5253103516174084,
      "learning_rate": 9.166246314981066e-06,
      "loss": 0.4278,
      "step": 133
    },
    {
      "epoch": 4.0606060606060606,
      "grad_norm": 0.4229995957542638,
      "learning_rate": 9.015108802188314e-06,
      "loss": 0.4199,
      "step": 134
    },
    {
      "epoch": 4.090909090909091,
      "grad_norm": 0.4834993887824011,
      "learning_rate": 8.86419813949525e-06,
      "loss": 0.4233,
      "step": 135
    },
    {
      "epoch": 4.121212121212121,
      "grad_norm": 0.5083836778846227,
      "learning_rate": 8.71354908617169e-06,
      "loss": 0.4134,
      "step": 136
    },
    {
      "epoch": 4.151515151515151,
      "grad_norm": 0.4629826983464886,
      "learning_rate": 8.56319634123092e-06,
      "loss": 0.4153,
      "step": 137
    },
    {
      "epoch": 4.181818181818182,
      "grad_norm": 0.4262442369411264,
      "learning_rate": 8.413174535437486e-06,
      "loss": 0.4245,
      "step": 138
    },
    {
      "epoch": 4.212121212121212,
      "grad_norm": 0.48584922213655163,
      "learning_rate": 8.263518223330698e-06,
      "loss": 0.4163,
      "step": 139
    },
    {
      "epoch": 4.242424242424242,
      "grad_norm": 0.43099027346452917,
      "learning_rate": 8.114261875265643e-06,
      "loss": 0.4219,
      "step": 140
    },
    {
      "epoch": 4.2727272727272725,
      "grad_norm": 0.45792891092025656,
      "learning_rate": 7.965439869473664e-06,
      "loss": 0.4252,
      "step": 141
    },
    {
      "epoch": 4.303030303030303,
      "grad_norm": 0.45821297586471343,
      "learning_rate": 7.817086484143987e-06,
      "loss": 0.4275,
      "step": 142
    },
    {
      "epoch": 4.333333333333333,
      "grad_norm": 0.4440069719961142,
      "learning_rate": 7.669235889528436e-06,
      "loss": 0.4293,
      "step": 143
    },
    {
      "epoch": 4.363636363636363,
      "grad_norm": 0.4061400289189544,
      "learning_rate": 7.521922140071003e-06,
      "loss": 0.4331,
      "step": 144
    },
    {
      "epoch": 4.393939393939394,
      "grad_norm": 0.37815756277205015,
      "learning_rate": 7.375179166564062e-06,
      "loss": 0.408,
      "step": 145
    },
    {
      "epoch": 4.424242424242424,
      "grad_norm": 0.4742561168827842,
      "learning_rate": 7.2290407683331154e-06,
      "loss": 0.4241,
      "step": 146
    },
    {
      "epoch": 4.454545454545454,
      "grad_norm": 0.3850909440657431,
      "learning_rate": 7.0835406054517505e-06,
      "loss": 0.4199,
      "step": 147
    },
    {
      "epoch": 4.484848484848484,
      "grad_norm": 0.3850307914357772,
      "learning_rate": 6.93871219098876e-06,
      "loss": 0.414,
      "step": 148
    },
    {
      "epoch": 4.515151515151516,
      "grad_norm": 0.37628212538187894,
      "learning_rate": 6.79458888328903e-06,
      "loss": 0.4142,
      "step": 149
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 0.3697164917421783,
      "learning_rate": 6.651203878290139e-06,
      "loss": 0.4199,
      "step": 150
    },
    {
      "epoch": 4.575757575757576,
      "grad_norm": 0.3936732478606088,
      "learning_rate": 6.508590201876317e-06,
      "loss": 0.4392,
      "step": 151
    },
    {
      "epoch": 4.606060606060606,
      "grad_norm": 0.36979958098093274,
      "learning_rate": 6.366780702271589e-06,
      "loss": 0.429,
      "step": 152
    },
    {
      "epoch": 4.636363636363637,
      "grad_norm": 0.4120222499856573,
      "learning_rate": 6.225808042473857e-06,
      "loss": 0.4245,
      "step": 153
    },
    {
      "epoch": 4.666666666666667,
      "grad_norm": 0.37901191700407694,
      "learning_rate": 6.085704692731609e-06,
      "loss": 0.4339,
      "step": 154
    },
    {
      "epoch": 4.696969696969697,
      "grad_norm": 0.34436938180267346,
      "learning_rate": 5.946502923065054e-06,
      "loss": 0.4195,
      "step": 155
    },
    {
      "epoch": 4.7272727272727275,
      "grad_norm": 0.3954985824584074,
      "learning_rate": 5.8082347958333625e-06,
      "loss": 0.4257,
      "step": 156
    },
    {
      "epoch": 4.757575757575758,
      "grad_norm": 0.3847038911028595,
      "learning_rate": 5.670932158349732e-06,
      "loss": 0.4105,
      "step": 157
    },
    {
      "epoch": 4.787878787878788,
      "grad_norm": 0.35182897006844766,
      "learning_rate": 5.534626635546e-06,
      "loss": 0.4161,
      "step": 158
    },
    {
      "epoch": 4.818181818181818,
      "grad_norm": 0.3395768604052704,
      "learning_rate": 5.399349622688479e-06,
      "loss": 0.4274,
      "step": 159
    },
    {
      "epoch": 4.848484848484849,
      "grad_norm": 0.37610765942404767,
      "learning_rate": 5.2651322781466606e-06,
      "loss": 0.4093,
      "step": 160
    },
    {
      "epoch": 4.878787878787879,
      "grad_norm": 0.34369090240001643,
      "learning_rate": 5.132005516216512e-06,
      "loss": 0.4117,
      "step": 161
    },
    {
      "epoch": 4.909090909090909,
      "grad_norm": 0.3193574933254639,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.4244,
      "step": 162
    },
    {
      "epoch": 4.9393939393939394,
      "grad_norm": 0.3223949271599354,
      "learning_rate": 4.869146134342426e-06,
      "loss": 0.4103,
      "step": 163
    },
    {
      "epoch": 4.96969696969697,
      "grad_norm": 0.3244127700665561,
      "learning_rate": 4.739474058829288e-06,
      "loss": 0.413,
      "step": 164
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.32409787537740004,
      "learning_rate": 4.611013640844245e-06,
      "loss": 0.4165,
      "step": 165
    },
    {
      "epoch": 5.03030303030303,
      "grad_norm": 0.4575500171903933,
      "learning_rate": 4.483794468689728e-06,
      "loss": 0.3721,
      "step": 166
    },
    {
      "epoch": 5.0606060606060606,
      "grad_norm": 0.36883989819661467,
      "learning_rate": 4.357845844771881e-06,
      "loss": 0.3834,
      "step": 167
    },
    {
      "epoch": 5.090909090909091,
      "grad_norm": 0.33445890390189614,
      "learning_rate": 4.2331967788513295e-06,
      "loss": 0.3911,
      "step": 168
    },
    {
      "epoch": 5.121212121212121,
      "grad_norm": 0.41535879066134074,
      "learning_rate": 4.109875981361363e-06,
      "loss": 0.379,
      "step": 169
    },
    {
      "epoch": 5.151515151515151,
      "grad_norm": 0.4967611991379499,
      "learning_rate": 3.987911856795047e-06,
      "loss": 0.3838,
      "step": 170
    },
    {
      "epoch": 5.181818181818182,
      "grad_norm": 0.4117803449507636,
      "learning_rate": 3.867332497162836e-06,
      "loss": 0.3806,
      "step": 171
    },
    {
      "epoch": 5.212121212121212,
      "grad_norm": 0.3805214746037457,
      "learning_rate": 3.748165675522113e-06,
      "loss": 0.3716,
      "step": 172
    },
    {
      "epoch": 5.242424242424242,
      "grad_norm": 0.3538575739009125,
      "learning_rate": 3.630438839580217e-06,
      "loss": 0.3894,
      "step": 173
    },
    {
      "epoch": 5.2727272727272725,
      "grad_norm": 0.4055868044908981,
      "learning_rate": 3.5141791053724405e-06,
      "loss": 0.3814,
      "step": 174
    },
    {
      "epoch": 5.303030303030303,
      "grad_norm": 0.4047775213330113,
      "learning_rate": 3.399413251016359e-06,
      "loss": 0.3742,
      "step": 175
    },
    {
      "epoch": 5.333333333333333,
      "grad_norm": 0.351123670955561,
      "learning_rate": 3.2861677105440335e-06,
      "loss": 0.3725,
      "step": 176
    },
    {
      "epoch": 5.363636363636363,
      "grad_norm": 0.3587210333393083,
      "learning_rate": 3.174468567813461e-06,
      "loss": 0.376,
      "step": 177
    },
    {
      "epoch": 5.393939393939394,
      "grad_norm": 0.38060043226029316,
      "learning_rate": 3.0643415505006733e-06,
      "loss": 0.3776,
      "step": 178
    },
    {
      "epoch": 5.424242424242424,
      "grad_norm": 0.33805978166739914,
      "learning_rate": 2.9558120241738786e-06,
      "loss": 0.3796,
      "step": 179
    },
    {
      "epoch": 5.454545454545454,
      "grad_norm": 0.3221363257994335,
      "learning_rate": 2.8489049864510053e-06,
      "loss": 0.3724,
      "step": 180
    },
    {
      "epoch": 5.484848484848484,
      "grad_norm": 0.3248118221029093,
      "learning_rate": 2.7436450612420098e-06,
      "loss": 0.3768,
      "step": 181
    },
    {
      "epoch": 5.515151515151516,
      "grad_norm": 0.3571193688032367,
      "learning_rate": 2.640056493077231e-06,
      "loss": 0.3718,
      "step": 182
    },
    {
      "epoch": 5.545454545454545,
      "grad_norm": 0.325622710888302,
      "learning_rate": 2.5381631415231455e-06,
      "loss": 0.374,
      "step": 183
    },
    {
      "epoch": 5.575757575757576,
      "grad_norm": 0.30970994937371304,
      "learning_rate": 2.4379884756868167e-06,
      "loss": 0.365,
      "step": 184
    },
    {
      "epoch": 5.606060606060606,
      "grad_norm": 0.2939098855993905,
      "learning_rate": 2.339555568810221e-06,
      "loss": 0.3728,
      "step": 185
    },
    {
      "epoch": 5.636363636363637,
      "grad_norm": 0.3327725237141841,
      "learning_rate": 2.2428870929558012e-06,
      "loss": 0.3696,
      "step": 186
    },
    {
      "epoch": 5.666666666666667,
      "grad_norm": 0.33733530820889834,
      "learning_rate": 2.1480053137844115e-06,
      "loss": 0.3688,
      "step": 187
    },
    {
      "epoch": 5.696969696969697,
      "grad_norm": 0.3267723664611921,
      "learning_rate": 2.054932085426856e-06,
      "loss": 0.3743,
      "step": 188
    },
    {
      "epoch": 5.7272727272727275,
      "grad_norm": 0.30477416794739226,
      "learning_rate": 1.963688845450218e-06,
      "loss": 0.3771,
      "step": 189
    },
    {
      "epoch": 5.757575757575758,
      "grad_norm": 0.31237110797322104,
      "learning_rate": 1.8742966099201699e-06,
      "loss": 0.3713,
      "step": 190
    },
    {
      "epoch": 5.787878787878788,
      "grad_norm": 0.3270216603267311,
      "learning_rate": 1.7867759685603115e-06,
      "loss": 0.3735,
      "step": 191
    },
    {
      "epoch": 5.818181818181818,
      "grad_norm": 0.3086374044328664,
      "learning_rate": 1.7011470800097496e-06,
      "loss": 0.3792,
      "step": 192
    },
    {
      "epoch": 5.848484848484849,
      "grad_norm": 0.3154329043876322,
      "learning_rate": 1.6174296671799571e-06,
      "loss": 0.3746,
      "step": 193
    },
    {
      "epoch": 5.878787878787879,
      "grad_norm": 0.3186084501919431,
      "learning_rate": 1.5356430127119915e-06,
      "loss": 0.386,
      "step": 194
    },
    {
      "epoch": 5.909090909090909,
      "grad_norm": 0.3129459155979679,
      "learning_rate": 1.4558059545351144e-06,
      "loss": 0.3787,
      "step": 195
    },
    {
      "epoch": 5.9393939393939394,
      "grad_norm": 0.2849812085273699,
      "learning_rate": 1.3779368815278648e-06,
      "loss": 0.3801,
      "step": 196
    },
    {
      "epoch": 5.96969696969697,
      "grad_norm": 0.29770863297104755,
      "learning_rate": 1.302053729282533e-06,
      "loss": 0.3907,
      "step": 197
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.29769942695520835,
      "learning_rate": 1.2281739759740575e-06,
      "loss": 0.3755,
      "step": 198
    },
    {
      "epoch": 6.03030303030303,
      "grad_norm": 0.37028292991289424,
      "learning_rate": 1.156314638334277e-06,
      "loss": 0.3658,
      "step": 199
    },
    {
      "epoch": 6.0606060606060606,
      "grad_norm": 0.3640687593367054,
      "learning_rate": 1.086492267732462e-06,
      "loss": 0.3603,
      "step": 200
    },
    {
      "epoch": 6.090909090909091,
      "grad_norm": 0.34956891428811765,
      "learning_rate": 1.01872294636304e-06,
      "loss": 0.3529,
      "step": 201
    },
    {
      "epoch": 6.121212121212121,
      "grad_norm": 0.31208168864820524,
      "learning_rate": 9.530222835413739e-07,
      "loss": 0.359,
      "step": 202
    },
    {
      "epoch": 6.151515151515151,
      "grad_norm": 0.2985599079997785,
      "learning_rate": 8.894054121084839e-07,
      "loss": 0.3641,
      "step": 203
    },
    {
      "epoch": 6.181818181818182,
      "grad_norm": 0.3045681297282551,
      "learning_rate": 8.278869849454718e-07,
      "loss": 0.3479,
      "step": 204
    },
    {
      "epoch": 6.212121212121212,
      "grad_norm": 0.3337677361693306,
      "learning_rate": 7.684811715985429e-07,
      "loss": 0.3661,
      "step": 205
    },
    {
      "epoch": 6.242424242424242,
      "grad_norm": 0.330091275503482,
      "learning_rate": 7.1120165501533e-07,
      "loss": 0.3638,
      "step": 206
    },
    {
      "epoch": 6.2727272727272725,
      "grad_norm": 0.33236530374834883,
      "learning_rate": 6.560616283932897e-07,
      "loss": 0.3585,
      "step": 207
    },
    {
      "epoch": 6.303030303030303,
      "grad_norm": 0.33403382077537863,
      "learning_rate": 6.030737921409169e-07,
      "loss": 0.3523,
      "step": 208
    },
    {
      "epoch": 6.333333333333333,
      "grad_norm": 0.3076896715478002,
      "learning_rate": 5.522503509524591e-07,
      "loss": 0.3562,
      "step": 209
    },
    {
      "epoch": 6.363636363636363,
      "grad_norm": 0.2892632966187589,
      "learning_rate": 5.036030109968082e-07,
      "loss": 0.3558,
      "step": 210
    },
    {
      "epoch": 6.393939393939394,
      "grad_norm": 0.3006521646154084,
      "learning_rate": 4.5714297722121105e-07,
      "loss": 0.3598,
      "step": 211
    },
    {
      "epoch": 6.424242424242424,
      "grad_norm": 0.29904963807062096,
      "learning_rate": 4.128809507704445e-07,
      "loss": 0.3521,
      "step": 212
    },
    {
      "epoch": 6.454545454545454,
      "grad_norm": 0.29793493646756863,
      "learning_rate": 3.708271265220087e-07,
      "loss": 0.349,
      "step": 213
    },
    {
      "epoch": 6.484848484848484,
      "grad_norm": 0.2935354245570347,
      "learning_rate": 3.309911907379393e-07,
      "loss": 0.3649,
      "step": 214
    },
    {
      "epoch": 6.515151515151516,
      "grad_norm": 0.27964737258039646,
      "learning_rate": 2.9338231883378365e-07,
      "loss": 0.3576,
      "step": 215
    },
    {
      "epoch": 6.545454545454545,
      "grad_norm": 0.301582739448306,
      "learning_rate": 2.5800917326521013e-07,
      "loss": 0.3602,
      "step": 216
    },
    {
      "epoch": 6.575757575757576,
      "grad_norm": 0.2936742547927523,
      "learning_rate": 2.248799015327907e-07,
      "loss": 0.3573,
      "step": 217
    },
    {
      "epoch": 6.606060606060606,
      "grad_norm": 0.29954291916250736,
      "learning_rate": 1.9400213430538773e-07,
      "loss": 0.3546,
      "step": 218
    },
    {
      "epoch": 6.636363636363637,
      "grad_norm": 0.27606871733537713,
      "learning_rate": 1.6538298366257975e-07,
      "loss": 0.3578,
      "step": 219
    },
    {
      "epoch": 6.666666666666667,
      "grad_norm": 0.30802643125453266,
      "learning_rate": 1.3902904145653094e-07,
      "loss": 0.3449,
      "step": 220
    },
    {
      "epoch": 6.696969696969697,
      "grad_norm": 0.2962737721620968,
      "learning_rate": 1.1494637779369766e-07,
      "loss": 0.3513,
      "step": 221
    },
    {
      "epoch": 6.7272727272727275,
      "grad_norm": 0.2747413575856551,
      "learning_rate": 9.314053963669245e-08,
      "loss": 0.3638,
      "step": 222
    },
    {
      "epoch": 6.757575757575758,
      "grad_norm": 0.2854609348497825,
      "learning_rate": 7.361654952665608e-08,
      "loss": 0.3538,
      "step": 223
    },
    {
      "epoch": 6.787878787878788,
      "grad_norm": 0.28835820053227823,
      "learning_rate": 5.637890442641403e-08,
      "loss": 0.3525,
      "step": 224
    },
    {
      "epoch": 6.818181818181818,
      "grad_norm": 0.2912380841058537,
      "learning_rate": 4.143157468468717e-08,
      "loss": 0.352,
      "step": 225
    },
    {
      "epoch": 6.848484848484849,
      "grad_norm": 0.2752077854667188,
      "learning_rate": 2.8778003121607834e-08,
      "loss": 0.3573,
      "step": 226
    },
    {
      "epoch": 6.878787878787879,
      "grad_norm": 0.2879200632137298,
      "learning_rate": 1.8421104235727406e-08,
      "loss": 0.3513,
      "step": 227
    },
    {
      "epoch": 6.909090909090909,
      "grad_norm": 0.2906879499875626,
      "learning_rate": 1.0363263532724433e-08,
      "loss": 0.3506,
      "step": 228
    },
    {
      "epoch": 6.9393939393939394,
      "grad_norm": 0.2787965911550042,
      "learning_rate": 4.606336975948589e-09,
      "loss": 0.3495,
      "step": 229
    },
    {
      "epoch": 6.96969696969697,
      "grad_norm": 0.27756524800483817,
      "learning_rate": 1.1516505589381777e-09,
      "loss": 0.3507,
      "step": 230
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.2770002873936819,
      "learning_rate": 0.0,
      "loss": 0.357,
      "step": 231
    },
    {
      "epoch": 7.0,
      "step": 231,
      "total_flos": 6.474382962841354e+17,
      "train_loss": 0.5183511296392003,
      "train_runtime": 8671.5363,
      "train_samples_per_second": 2.551,
      "train_steps_per_second": 0.027
    }
  ],
  "logging_steps": 1,
  "max_steps": 231,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 7,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.474382962841354e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}