{ "best_global_step": null, "best_metric": null, "best_model_checkpoint": null, "epoch": 1.0, "eval_steps": 500, "global_step": 19613, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0005098659052669148, "grad_norm": 946.1547547167231, "learning_rate": 1.5280135823429543e-06, "loss": 9.677, "step": 10 }, { "epoch": 0.0010197318105338296, "grad_norm": 544.7247510656614, "learning_rate": 3.225806451612903e-06, "loss": 8.2775, "step": 20 }, { "epoch": 0.0015295977158007444, "grad_norm": 1728.4541198728905, "learning_rate": 4.923599320882852e-06, "loss": 7.1393, "step": 30 }, { "epoch": 0.002039463621067659, "grad_norm": 479.729349357684, "learning_rate": 6.621392190152802e-06, "loss": 6.1852, "step": 40 }, { "epoch": 0.002549329526334574, "grad_norm": 894.5276149271358, "learning_rate": 8.31918505942275e-06, "loss": 5.4336, "step": 50 }, { "epoch": 0.0030591954316014887, "grad_norm": 591.4100785573886, "learning_rate": 1.00169779286927e-05, "loss": 5.0479, "step": 60 }, { "epoch": 0.0035690613368684035, "grad_norm": 299.20502596928674, "learning_rate": 1.171477079796265e-05, "loss": 4.6588, "step": 70 }, { "epoch": 0.004078927242135318, "grad_norm": 326.0090819574599, "learning_rate": 1.3412563667232598e-05, "loss": 4.4741, "step": 80 }, { "epoch": 0.0045887931474022335, "grad_norm": 553.1152730771238, "learning_rate": 1.5110356536502549e-05, "loss": 4.3923, "step": 90 }, { "epoch": 0.005098659052669148, "grad_norm": 429.2200639978728, "learning_rate": 1.6808149405772495e-05, "loss": 4.3827, "step": 100 }, { "epoch": 0.005608524957936063, "grad_norm": 217.043209696176, "learning_rate": 1.8505942275042447e-05, "loss": 4.3381, "step": 110 }, { "epoch": 0.0061183908632029774, "grad_norm": 71.5225482068816, "learning_rate": 2.0203735144312395e-05, "loss": 4.1111, "step": 120 }, { "epoch": 0.006628256768469893, "grad_norm": 59.30962303454184, "learning_rate": 2.1901528013582346e-05, "loss": 4.0054, "step": 130 }, { "epoch": 0.007138122673736807, "grad_norm": 44.78043152216485, "learning_rate": 2.3599320882852294e-05, "loss": 3.9987, "step": 140 }, { "epoch": 0.007647988579003722, "grad_norm": 86.47447277328804, "learning_rate": 2.5297113752122242e-05, "loss": 3.8178, "step": 150 }, { "epoch": 0.008157854484270637, "grad_norm": 33.444719477873726, "learning_rate": 2.699490662139219e-05, "loss": 3.6614, "step": 160 }, { "epoch": 0.008667720389537552, "grad_norm": 48.632312360697625, "learning_rate": 2.869269949066214e-05, "loss": 3.5938, "step": 170 }, { "epoch": 0.009177586294804467, "grad_norm": 43.824964873367996, "learning_rate": 3.0390492359932087e-05, "loss": 3.5408, "step": 180 }, { "epoch": 0.00968745220007138, "grad_norm": 48.440193697215264, "learning_rate": 3.2088285229202035e-05, "loss": 3.4292, "step": 190 }, { "epoch": 0.010197318105338296, "grad_norm": 46.372222786863986, "learning_rate": 3.378607809847199e-05, "loss": 3.2039, "step": 200 }, { "epoch": 0.010707184010605211, "grad_norm": 82.68846959999989, "learning_rate": 3.548387096774194e-05, "loss": 3.0534, "step": 210 }, { "epoch": 0.011217049915872126, "grad_norm": 99.53466045235125, "learning_rate": 3.7181663837011886e-05, "loss": 2.9101, "step": 220 }, { "epoch": 0.01172691582113904, "grad_norm": 64.16723536538507, "learning_rate": 3.8879456706281834e-05, "loss": 2.8509, "step": 230 }, { "epoch": 0.012236781726405955, "grad_norm": 60.247152435132286, "learning_rate": 4.057724957555178e-05, "loss": 2.7339, "step": 240 }, { "epoch": 0.01274664763167287, 
"grad_norm": 126.69897963259963, "learning_rate": 4.227504244482173e-05, "loss": 2.5434, "step": 250 }, { "epoch": 0.013256513536939785, "grad_norm": 71.30801147824252, "learning_rate": 4.3972835314091685e-05, "loss": 2.5203, "step": 260 }, { "epoch": 0.013766379442206699, "grad_norm": 54.55308844523217, "learning_rate": 4.567062818336163e-05, "loss": 2.4222, "step": 270 }, { "epoch": 0.014276245347473614, "grad_norm": 36.84656998948245, "learning_rate": 4.736842105263158e-05, "loss": 2.3382, "step": 280 }, { "epoch": 0.01478611125274053, "grad_norm": 54.17242333918137, "learning_rate": 4.9066213921901536e-05, "loss": 2.295, "step": 290 }, { "epoch": 0.015295977158007444, "grad_norm": 64.90929737356474, "learning_rate": 5.0764006791171484e-05, "loss": 2.2441, "step": 300 }, { "epoch": 0.015805843063274358, "grad_norm": 46.816781644417524, "learning_rate": 5.2461799660441425e-05, "loss": 2.1804, "step": 310 }, { "epoch": 0.016315708968541273, "grad_norm": 56.46868292969693, "learning_rate": 5.415959252971138e-05, "loss": 2.1558, "step": 320 }, { "epoch": 0.01682557487380819, "grad_norm": 48.224835141523755, "learning_rate": 5.5857385398981335e-05, "loss": 2.1064, "step": 330 }, { "epoch": 0.017335440779075104, "grad_norm": 43.63841495963787, "learning_rate": 5.7555178268251276e-05, "loss": 2.1382, "step": 340 }, { "epoch": 0.01784530668434202, "grad_norm": 44.816260186893615, "learning_rate": 5.925297113752123e-05, "loss": 2.0626, "step": 350 }, { "epoch": 0.018355172589608934, "grad_norm": 35.749490569385365, "learning_rate": 6.095076400679117e-05, "loss": 2.0319, "step": 360 }, { "epoch": 0.01886503849487585, "grad_norm": 36.955674387001245, "learning_rate": 6.264855687606112e-05, "loss": 2.0229, "step": 370 }, { "epoch": 0.01937490440014276, "grad_norm": 34.68228399222833, "learning_rate": 6.434634974533107e-05, "loss": 1.9807, "step": 380 }, { "epoch": 0.019884770305409676, "grad_norm": 39.21220369968134, "learning_rate": 6.604414261460103e-05, "loss": 1.8996, "step": 390 }, { "epoch": 0.02039463621067659, "grad_norm": 63.36896641437639, "learning_rate": 6.774193548387096e-05, "loss": 1.8744, "step": 400 }, { "epoch": 0.020904502115943507, "grad_norm": 35.410707083894266, "learning_rate": 6.943972835314093e-05, "loss": 1.883, "step": 410 }, { "epoch": 0.021414368021210422, "grad_norm": 30.989701961084524, "learning_rate": 7.113752122241087e-05, "loss": 1.8126, "step": 420 }, { "epoch": 0.021924233926477337, "grad_norm": 36.11852060166006, "learning_rate": 7.283531409168082e-05, "loss": 1.931, "step": 430 }, { "epoch": 0.022434099831744252, "grad_norm": 34.26327225869466, "learning_rate": 7.453310696095077e-05, "loss": 1.8503, "step": 440 }, { "epoch": 0.022943965737011168, "grad_norm": 35.512191524471405, "learning_rate": 7.623089983022072e-05, "loss": 1.7695, "step": 450 }, { "epoch": 0.02345383164227808, "grad_norm": 30.89105132115667, "learning_rate": 7.792869269949067e-05, "loss": 1.7874, "step": 460 }, { "epoch": 0.023963697547544995, "grad_norm": 30.637291014545614, "learning_rate": 7.962648556876061e-05, "loss": 1.8044, "step": 470 }, { "epoch": 0.02447356345281191, "grad_norm": 23.622670649093685, "learning_rate": 8.132427843803056e-05, "loss": 1.7816, "step": 480 }, { "epoch": 0.024983429358078825, "grad_norm": 30.805248984521565, "learning_rate": 8.302207130730051e-05, "loss": 1.7745, "step": 490 }, { "epoch": 0.02549329526334574, "grad_norm": 28.992150209438435, "learning_rate": 8.471986417657046e-05, "loss": 1.8071, "step": 500 }, { "epoch": 0.026003161168612655, 
"grad_norm": 32.17766281670865, "learning_rate": 8.641765704584042e-05, "loss": 1.735, "step": 510 }, { "epoch": 0.02651302707387957, "grad_norm": 22.604114563009052, "learning_rate": 8.811544991511035e-05, "loss": 1.8334, "step": 520 }, { "epoch": 0.027022892979146486, "grad_norm": 22.175478912262538, "learning_rate": 8.981324278438032e-05, "loss": 1.659, "step": 530 }, { "epoch": 0.027532758884413398, "grad_norm": 29.575964324637216, "learning_rate": 9.151103565365025e-05, "loss": 1.6867, "step": 540 }, { "epoch": 0.028042624789680313, "grad_norm": 23.576640350398364, "learning_rate": 9.320882852292021e-05, "loss": 1.7008, "step": 550 }, { "epoch": 0.028552490694947228, "grad_norm": 37.51998597499104, "learning_rate": 9.490662139219015e-05, "loss": 1.7211, "step": 560 }, { "epoch": 0.029062356600214143, "grad_norm": 15.417959281812669, "learning_rate": 9.660441426146011e-05, "loss": 1.6255, "step": 570 }, { "epoch": 0.02957222250548106, "grad_norm": 22.186410865841005, "learning_rate": 9.830220713073006e-05, "loss": 1.6677, "step": 580 }, { "epoch": 0.030082088410747974, "grad_norm": 28.48049006276817, "learning_rate": 0.0001, "loss": 1.6622, "step": 590 }, { "epoch": 0.03059195431601489, "grad_norm": 20.16062115399988, "learning_rate": 9.999993182329924e-05, "loss": 1.6327, "step": 600 }, { "epoch": 0.031101820221281804, "grad_norm": 20.503281569928266, "learning_rate": 9.999972729338288e-05, "loss": 1.6157, "step": 610 }, { "epoch": 0.031611686126548716, "grad_norm": 16.081250504970278, "learning_rate": 9.99993864108087e-05, "loss": 1.6535, "step": 620 }, { "epoch": 0.03212155203181563, "grad_norm": 23.03873979953226, "learning_rate": 9.999890917650628e-05, "loss": 1.5929, "step": 630 }, { "epoch": 0.032631417937082546, "grad_norm": 17.488947087447386, "learning_rate": 9.999829559177709e-05, "loss": 1.5407, "step": 640 }, { "epoch": 0.03314128384234946, "grad_norm": 20.532891681871146, "learning_rate": 9.999754565829444e-05, "loss": 1.6118, "step": 650 }, { "epoch": 0.03365114974761638, "grad_norm": 24.990656263548626, "learning_rate": 9.999665937810341e-05, "loss": 1.6121, "step": 660 }, { "epoch": 0.03416101565288329, "grad_norm": 18.93753558506703, "learning_rate": 9.999563675362096e-05, "loss": 1.59, "step": 670 }, { "epoch": 0.03467088155815021, "grad_norm": 14.663441382121054, "learning_rate": 9.999447778763585e-05, "loss": 1.5577, "step": 680 }, { "epoch": 0.03518074746341712, "grad_norm": 18.757298128213563, "learning_rate": 9.999318248330867e-05, "loss": 1.5638, "step": 690 }, { "epoch": 0.03569061336868404, "grad_norm": 18.97335326579308, "learning_rate": 9.999175084417182e-05, "loss": 1.5323, "step": 700 }, { "epoch": 0.03620047927395095, "grad_norm": 16.333532400542737, "learning_rate": 9.999018287412945e-05, "loss": 1.596, "step": 710 }, { "epoch": 0.03671034517921787, "grad_norm": 12.898417787895172, "learning_rate": 9.998847857745752e-05, "loss": 1.6924, "step": 720 }, { "epoch": 0.03722021108448478, "grad_norm": 15.078759025875287, "learning_rate": 9.998663795880377e-05, "loss": 1.5784, "step": 730 }, { "epoch": 0.0377300769897517, "grad_norm": 25.78359047701089, "learning_rate": 9.998466102318771e-05, "loss": 1.5583, "step": 740 }, { "epoch": 0.03823994289501861, "grad_norm": 13.495256766439768, "learning_rate": 9.998254777600056e-05, "loss": 1.5748, "step": 750 }, { "epoch": 0.03874980880028552, "grad_norm": 25.54450279200201, "learning_rate": 9.998029822300528e-05, "loss": 1.4746, "step": 760 }, { "epoch": 0.03925967470555244, "grad_norm": 16.607989604937515, 
"learning_rate": 9.997791237033657e-05, "loss": 1.5983, "step": 770 }, { "epoch": 0.03976954061081935, "grad_norm": 13.846076340497213, "learning_rate": 9.997539022450082e-05, "loss": 1.5098, "step": 780 }, { "epoch": 0.04027940651608627, "grad_norm": 17.303687104171345, "learning_rate": 9.997273179237606e-05, "loss": 1.5409, "step": 790 }, { "epoch": 0.04078927242135318, "grad_norm": 18.449418144738164, "learning_rate": 9.996993708121206e-05, "loss": 1.5081, "step": 800 }, { "epoch": 0.0412991383266201, "grad_norm": 14.303705630794529, "learning_rate": 9.996700609863013e-05, "loss": 1.5393, "step": 810 }, { "epoch": 0.04180900423188701, "grad_norm": 17.238984622051, "learning_rate": 9.996393885262332e-05, "loss": 1.5065, "step": 820 }, { "epoch": 0.04231887013715393, "grad_norm": 12.343117780150777, "learning_rate": 9.996073535155618e-05, "loss": 1.459, "step": 830 }, { "epoch": 0.042828736042420844, "grad_norm": 18.09183197481867, "learning_rate": 9.995739560416488e-05, "loss": 1.5167, "step": 840 }, { "epoch": 0.04333860194768776, "grad_norm": 11.840493736268275, "learning_rate": 9.995391961955715e-05, "loss": 1.4625, "step": 850 }, { "epoch": 0.043848467852954674, "grad_norm": 11.51780571201556, "learning_rate": 9.995030740721225e-05, "loss": 1.4507, "step": 860 }, { "epoch": 0.04435833375822159, "grad_norm": 13.036311222953925, "learning_rate": 9.994655897698088e-05, "loss": 1.405, "step": 870 }, { "epoch": 0.044868199663488505, "grad_norm": 19.11790748949296, "learning_rate": 9.994267433908533e-05, "loss": 1.5213, "step": 880 }, { "epoch": 0.04537806556875542, "grad_norm": 11.616955086783236, "learning_rate": 9.993865350411922e-05, "loss": 1.4358, "step": 890 }, { "epoch": 0.045887931474022335, "grad_norm": 15.202800317578177, "learning_rate": 9.993449648304765e-05, "loss": 1.4656, "step": 900 }, { "epoch": 0.04639779737928925, "grad_norm": 10.861336191036171, "learning_rate": 9.99302032872071e-05, "loss": 1.428, "step": 910 }, { "epoch": 0.04690766328455616, "grad_norm": 13.201777890271243, "learning_rate": 9.992577392830542e-05, "loss": 1.4195, "step": 920 }, { "epoch": 0.047417529189823074, "grad_norm": 9.178791009234759, "learning_rate": 9.992120841842177e-05, "loss": 1.4354, "step": 930 }, { "epoch": 0.04792739509508999, "grad_norm": 15.66628336799611, "learning_rate": 9.99165067700066e-05, "loss": 1.3938, "step": 940 }, { "epoch": 0.048437261000356904, "grad_norm": 11.59188910926961, "learning_rate": 9.991166899588163e-05, "loss": 1.4134, "step": 950 }, { "epoch": 0.04894712690562382, "grad_norm": 10.752174542049293, "learning_rate": 9.99066951092398e-05, "loss": 1.3447, "step": 960 }, { "epoch": 0.049456992810890735, "grad_norm": 12.322110274926507, "learning_rate": 9.990158512364522e-05, "loss": 1.4129, "step": 970 }, { "epoch": 0.04996685871615765, "grad_norm": 11.447428766261394, "learning_rate": 9.989633905303319e-05, "loss": 1.4276, "step": 980 }, { "epoch": 0.050476724621424565, "grad_norm": 12.852124943645142, "learning_rate": 9.98909569117101e-05, "loss": 1.3558, "step": 990 }, { "epoch": 0.05098659052669148, "grad_norm": 14.822190112357378, "learning_rate": 9.98854387143534e-05, "loss": 1.4623, "step": 1000 }, { "epoch": 0.051496456431958396, "grad_norm": 15.515256851316536, "learning_rate": 9.98797844760116e-05, "loss": 1.4883, "step": 1010 }, { "epoch": 0.05200632233722531, "grad_norm": 9.567405011779273, "learning_rate": 9.987399421210419e-05, "loss": 1.4434, "step": 1020 }, { "epoch": 0.052516188242492226, "grad_norm": 9.256164671608484, "learning_rate": 
9.986806793842163e-05, "loss": 1.3478, "step": 1030 }, { "epoch": 0.05302605414775914, "grad_norm": 17.070804354637016, "learning_rate": 9.986200567112523e-05, "loss": 1.3341, "step": 1040 }, { "epoch": 0.053535920053026056, "grad_norm": 14.973890022995453, "learning_rate": 9.985580742674725e-05, "loss": 1.4138, "step": 1050 }, { "epoch": 0.05404578595829297, "grad_norm": 9.98151039457384, "learning_rate": 9.98494732221907e-05, "loss": 1.4348, "step": 1060 }, { "epoch": 0.05455565186355989, "grad_norm": 9.213280784853575, "learning_rate": 9.98430030747294e-05, "loss": 1.3649, "step": 1070 }, { "epoch": 0.055065517768826795, "grad_norm": 11.512319636509554, "learning_rate": 9.983639700200786e-05, "loss": 1.4215, "step": 1080 }, { "epoch": 0.05557538367409371, "grad_norm": 12.375853723369742, "learning_rate": 9.982965502204131e-05, "loss": 1.3548, "step": 1090 }, { "epoch": 0.056085249579360626, "grad_norm": 16.63336865073922, "learning_rate": 9.98227771532156e-05, "loss": 1.3482, "step": 1100 }, { "epoch": 0.05659511548462754, "grad_norm": 11.640203752023153, "learning_rate": 9.98157634142871e-05, "loss": 1.3411, "step": 1110 }, { "epoch": 0.057104981389894456, "grad_norm": 9.904873969510755, "learning_rate": 9.98086138243828e-05, "loss": 1.3865, "step": 1120 }, { "epoch": 0.05761484729516137, "grad_norm": 6.885520749380552, "learning_rate": 9.98013284030001e-05, "loss": 1.3043, "step": 1130 }, { "epoch": 0.05812471320042829, "grad_norm": 7.508737754108616, "learning_rate": 9.979390717000682e-05, "loss": 1.4498, "step": 1140 }, { "epoch": 0.0586345791056952, "grad_norm": 12.596890247499731, "learning_rate": 9.978635014564121e-05, "loss": 1.3844, "step": 1150 }, { "epoch": 0.05914444501096212, "grad_norm": 14.716717678438053, "learning_rate": 9.977865735051176e-05, "loss": 1.2998, "step": 1160 }, { "epoch": 0.05965431091622903, "grad_norm": 8.86525041475244, "learning_rate": 9.977082880559725e-05, "loss": 1.4236, "step": 1170 }, { "epoch": 0.06016417682149595, "grad_norm": 8.438445931523896, "learning_rate": 9.976286453224665e-05, "loss": 1.3377, "step": 1180 }, { "epoch": 0.06067404272676286, "grad_norm": 11.987494578604688, "learning_rate": 9.975476455217909e-05, "loss": 1.3396, "step": 1190 }, { "epoch": 0.06118390863202978, "grad_norm": 8.508524307495275, "learning_rate": 9.974652888748377e-05, "loss": 1.3562, "step": 1200 }, { "epoch": 0.06169377453729669, "grad_norm": 10.139996913816189, "learning_rate": 9.973815756061988e-05, "loss": 1.3081, "step": 1210 }, { "epoch": 0.06220364044256361, "grad_norm": 8.893072723924112, "learning_rate": 9.972965059441662e-05, "loss": 1.3785, "step": 1220 }, { "epoch": 0.06271350634783052, "grad_norm": 13.144137366868113, "learning_rate": 9.972100801207306e-05, "loss": 1.3559, "step": 1230 }, { "epoch": 0.06322337225309743, "grad_norm": 7.993631325592355, "learning_rate": 9.97122298371581e-05, "loss": 1.2776, "step": 1240 }, { "epoch": 0.06373323815836435, "grad_norm": 8.103661114339973, "learning_rate": 9.970331609361045e-05, "loss": 1.2851, "step": 1250 }, { "epoch": 0.06424310406363126, "grad_norm": 7.326903132891211, "learning_rate": 9.969426680573847e-05, "loss": 1.3169, "step": 1260 }, { "epoch": 0.06475296996889818, "grad_norm": 11.920682176684714, "learning_rate": 9.968508199822018e-05, "loss": 1.3331, "step": 1270 }, { "epoch": 0.06526283587416509, "grad_norm": 6.350983744050441, "learning_rate": 9.967576169610319e-05, "loss": 1.3217, "step": 1280 }, { "epoch": 0.06577270177943201, "grad_norm": 8.119062253184078, "learning_rate": 
9.966630592480459e-05, "loss": 1.2693, "step": 1290 }, { "epoch": 0.06628256768469892, "grad_norm": 10.81943999835483, "learning_rate": 9.965671471011091e-05, "loss": 1.3865, "step": 1300 }, { "epoch": 0.06679243358996584, "grad_norm": 8.591767358572096, "learning_rate": 9.964698807817805e-05, "loss": 1.3059, "step": 1310 }, { "epoch": 0.06730229949523275, "grad_norm": 6.884677751275266, "learning_rate": 9.963712605553119e-05, "loss": 1.3225, "step": 1320 }, { "epoch": 0.06781216540049967, "grad_norm": 8.127541110021461, "learning_rate": 9.962712866906476e-05, "loss": 1.2426, "step": 1330 }, { "epoch": 0.06832203130576658, "grad_norm": 9.497739154579854, "learning_rate": 9.961699594604227e-05, "loss": 1.3854, "step": 1340 }, { "epoch": 0.0688318972110335, "grad_norm": 8.234529683565784, "learning_rate": 9.96067279140964e-05, "loss": 1.3541, "step": 1350 }, { "epoch": 0.06934176311630041, "grad_norm": 14.121817799847086, "learning_rate": 9.959632460122872e-05, "loss": 1.288, "step": 1360 }, { "epoch": 0.06985162902156733, "grad_norm": 8.300552320933303, "learning_rate": 9.958578603580981e-05, "loss": 1.2946, "step": 1370 }, { "epoch": 0.07036149492683424, "grad_norm": 11.281514779427111, "learning_rate": 9.957511224657902e-05, "loss": 1.3188, "step": 1380 }, { "epoch": 0.07087136083210116, "grad_norm": 8.401565431210939, "learning_rate": 9.956430326264453e-05, "loss": 1.3062, "step": 1390 }, { "epoch": 0.07138122673736808, "grad_norm": 13.395221223404873, "learning_rate": 9.955335911348316e-05, "loss": 1.3546, "step": 1400 }, { "epoch": 0.07189109264263499, "grad_norm": 9.531692368877174, "learning_rate": 9.954227982894034e-05, "loss": 1.2832, "step": 1410 }, { "epoch": 0.0724009585479019, "grad_norm": 9.661709751824839, "learning_rate": 9.953106543923005e-05, "loss": 1.3729, "step": 1420 }, { "epoch": 0.07291082445316882, "grad_norm": 8.059671565620953, "learning_rate": 9.95197159749347e-05, "loss": 1.3321, "step": 1430 }, { "epoch": 0.07342069035843574, "grad_norm": 7.257477642996525, "learning_rate": 9.950823146700502e-05, "loss": 1.3132, "step": 1440 }, { "epoch": 0.07393055626370265, "grad_norm": 13.079956490702457, "learning_rate": 9.949661194676008e-05, "loss": 1.2903, "step": 1450 }, { "epoch": 0.07444042216896957, "grad_norm": 9.797908918752556, "learning_rate": 9.948485744588709e-05, "loss": 1.2766, "step": 1460 }, { "epoch": 0.07495028807423648, "grad_norm": 8.8963959777427, "learning_rate": 9.947296799644136e-05, "loss": 1.3563, "step": 1470 }, { "epoch": 0.0754601539795034, "grad_norm": 6.024664773794652, "learning_rate": 9.946094363084623e-05, "loss": 1.3051, "step": 1480 }, { "epoch": 0.07597001988477031, "grad_norm": 15.083381217497958, "learning_rate": 9.944878438189297e-05, "loss": 1.2957, "step": 1490 }, { "epoch": 0.07647988579003721, "grad_norm": 8.659028930325837, "learning_rate": 9.943649028274069e-05, "loss": 1.2402, "step": 1500 }, { "epoch": 0.07698975169530413, "grad_norm": 8.483400054066859, "learning_rate": 9.942406136691622e-05, "loss": 1.3009, "step": 1510 }, { "epoch": 0.07749961760057104, "grad_norm": 9.04153849731825, "learning_rate": 9.941149766831408e-05, "loss": 1.2704, "step": 1520 }, { "epoch": 0.07800948350583796, "grad_norm": 6.685613160598831, "learning_rate": 9.939879922119628e-05, "loss": 1.2806, "step": 1530 }, { "epoch": 0.07851934941110487, "grad_norm": 8.748668860247195, "learning_rate": 9.938596606019241e-05, "loss": 1.3032, "step": 1540 }, { "epoch": 0.07902921531637179, "grad_norm": 10.516882054179344, "learning_rate": 
9.937299822029933e-05, "loss": 1.2432, "step": 1550 }, { "epoch": 0.0795390812216387, "grad_norm": 8.012469795551912, "learning_rate": 9.935989573688124e-05, "loss": 1.2907, "step": 1560 }, { "epoch": 0.08004894712690562, "grad_norm": 7.620672875381667, "learning_rate": 9.93466586456695e-05, "loss": 1.2428, "step": 1570 }, { "epoch": 0.08055881303217254, "grad_norm": 9.779429555978018, "learning_rate": 9.933328698276257e-05, "loss": 1.2466, "step": 1580 }, { "epoch": 0.08106867893743945, "grad_norm": 7.3320937475403065, "learning_rate": 9.931978078462587e-05, "loss": 1.2906, "step": 1590 }, { "epoch": 0.08157854484270637, "grad_norm": 6.794977736972588, "learning_rate": 9.930614008809171e-05, "loss": 1.2783, "step": 1600 }, { "epoch": 0.08208841074797328, "grad_norm": 5.641899938681774, "learning_rate": 9.929236493035923e-05, "loss": 1.2865, "step": 1610 }, { "epoch": 0.0825982766532402, "grad_norm": 6.976480247465172, "learning_rate": 9.927845534899421e-05, "loss": 1.2555, "step": 1620 }, { "epoch": 0.08310814255850711, "grad_norm": 8.90128363726804, "learning_rate": 9.9264411381929e-05, "loss": 1.2581, "step": 1630 }, { "epoch": 0.08361800846377403, "grad_norm": 8.540712581579276, "learning_rate": 9.925023306746248e-05, "loss": 1.2367, "step": 1640 }, { "epoch": 0.08412787436904094, "grad_norm": 7.488693574086519, "learning_rate": 9.923592044425987e-05, "loss": 1.2339, "step": 1650 }, { "epoch": 0.08463774027430786, "grad_norm": 11.310513660070532, "learning_rate": 9.922147355135268e-05, "loss": 1.2386, "step": 1660 }, { "epoch": 0.08514760617957477, "grad_norm": 6.929383413556347, "learning_rate": 9.920689242813855e-05, "loss": 1.3621, "step": 1670 }, { "epoch": 0.08565747208484169, "grad_norm": 6.8453184213270175, "learning_rate": 9.91921771143812e-05, "loss": 1.2755, "step": 1680 }, { "epoch": 0.0861673379901086, "grad_norm": 13.353344035463461, "learning_rate": 9.917732765021028e-05, "loss": 1.2949, "step": 1690 }, { "epoch": 0.08667720389537552, "grad_norm": 9.043807181846674, "learning_rate": 9.916234407612132e-05, "loss": 1.2238, "step": 1700 }, { "epoch": 0.08718706980064243, "grad_norm": 7.719464446952598, "learning_rate": 9.914722643297554e-05, "loss": 1.2437, "step": 1710 }, { "epoch": 0.08769693570590935, "grad_norm": 8.332761022366908, "learning_rate": 9.913197476199975e-05, "loss": 1.263, "step": 1720 }, { "epoch": 0.08820680161117626, "grad_norm": 7.22775493700009, "learning_rate": 9.911658910478633e-05, "loss": 1.2445, "step": 1730 }, { "epoch": 0.08871666751644318, "grad_norm": 6.704298697609803, "learning_rate": 9.910106950329299e-05, "loss": 1.2215, "step": 1740 }, { "epoch": 0.0892265334217101, "grad_norm": 6.444382157095585, "learning_rate": 9.908541599984276e-05, "loss": 1.2642, "step": 1750 }, { "epoch": 0.08973639932697701, "grad_norm": 8.140001307541976, "learning_rate": 9.906962863712378e-05, "loss": 1.3316, "step": 1760 }, { "epoch": 0.09024626523224392, "grad_norm": 6.631859873444056, "learning_rate": 9.90537074581893e-05, "loss": 1.2087, "step": 1770 }, { "epoch": 0.09075613113751084, "grad_norm": 7.069643684676236, "learning_rate": 9.903765250645742e-05, "loss": 1.2332, "step": 1780 }, { "epoch": 0.09126599704277776, "grad_norm": 7.966200972241499, "learning_rate": 9.90214638257111e-05, "loss": 1.2684, "step": 1790 }, { "epoch": 0.09177586294804467, "grad_norm": 13.77541921252732, "learning_rate": 9.900514146009798e-05, "loss": 1.336, "step": 1800 }, { "epoch": 0.09228572885331159, "grad_norm": 7.187849329498884, "learning_rate": 9.898868545413027e-05, 
"loss": 1.1634, "step": 1810 }, { "epoch": 0.0927955947585785, "grad_norm": 5.352581001102315, "learning_rate": 9.897209585268458e-05, "loss": 1.1773, "step": 1820 }, { "epoch": 0.0933054606638454, "grad_norm": 6.0910356767398195, "learning_rate": 9.895537270100194e-05, "loss": 1.2267, "step": 1830 }, { "epoch": 0.09381532656911232, "grad_norm": 7.1256730647442, "learning_rate": 9.893851604468747e-05, "loss": 1.2155, "step": 1840 }, { "epoch": 0.09432519247437923, "grad_norm": 6.602537485307025, "learning_rate": 9.892152592971045e-05, "loss": 1.2193, "step": 1850 }, { "epoch": 0.09483505837964615, "grad_norm": 6.912697078340405, "learning_rate": 9.890440240240406e-05, "loss": 1.1686, "step": 1860 }, { "epoch": 0.09534492428491306, "grad_norm": 7.6604763356635575, "learning_rate": 9.888714550946533e-05, "loss": 1.2251, "step": 1870 }, { "epoch": 0.09585479019017998, "grad_norm": 6.787843708359536, "learning_rate": 9.886975529795499e-05, "loss": 1.2538, "step": 1880 }, { "epoch": 0.0963646560954469, "grad_norm": 6.779150065021871, "learning_rate": 9.885223181529733e-05, "loss": 1.249, "step": 1890 }, { "epoch": 0.09687452200071381, "grad_norm": 7.372475657608788, "learning_rate": 9.883457510928005e-05, "loss": 1.2259, "step": 1900 }, { "epoch": 0.09738438790598072, "grad_norm": 4.803215633000802, "learning_rate": 9.881678522805422e-05, "loss": 1.2417, "step": 1910 }, { "epoch": 0.09789425381124764, "grad_norm": 9.617047316762212, "learning_rate": 9.879886222013403e-05, "loss": 1.1683, "step": 1920 }, { "epoch": 0.09840411971651455, "grad_norm": 4.889685554429617, "learning_rate": 9.878080613439678e-05, "loss": 1.2142, "step": 1930 }, { "epoch": 0.09891398562178147, "grad_norm": 5.759379292691569, "learning_rate": 9.876261702008262e-05, "loss": 1.3001, "step": 1940 }, { "epoch": 0.09942385152704838, "grad_norm": 11.065505514698799, "learning_rate": 9.874429492679451e-05, "loss": 1.2414, "step": 1950 }, { "epoch": 0.0999337174323153, "grad_norm": 8.704800666475151, "learning_rate": 9.872583990449804e-05, "loss": 1.3282, "step": 1960 }, { "epoch": 0.10044358333758222, "grad_norm": 5.679982367500135, "learning_rate": 9.870725200352128e-05, "loss": 1.2443, "step": 1970 }, { "epoch": 0.10095344924284913, "grad_norm": 4.795540026840549, "learning_rate": 9.868853127455477e-05, "loss": 1.2672, "step": 1980 }, { "epoch": 0.10146331514811605, "grad_norm": 8.929783559827515, "learning_rate": 9.866967776865115e-05, "loss": 1.2542, "step": 1990 }, { "epoch": 0.10197318105338296, "grad_norm": 7.123523092691813, "learning_rate": 9.865069153722526e-05, "loss": 1.2183, "step": 2000 }, { "epoch": 0.10248304695864988, "grad_norm": 7.387728237482376, "learning_rate": 9.86315726320538e-05, "loss": 1.2772, "step": 2010 }, { "epoch": 0.10299291286391679, "grad_norm": 5.901476994470112, "learning_rate": 9.861232110527536e-05, "loss": 1.2196, "step": 2020 }, { "epoch": 0.1035027787691837, "grad_norm": 5.450704849288761, "learning_rate": 9.859293700939014e-05, "loss": 1.2247, "step": 2030 }, { "epoch": 0.10401264467445062, "grad_norm": 6.1351665538569815, "learning_rate": 9.85734203972599e-05, "loss": 1.1758, "step": 2040 }, { "epoch": 0.10452251057971754, "grad_norm": 5.804292361232884, "learning_rate": 9.855377132210776e-05, "loss": 1.213, "step": 2050 }, { "epoch": 0.10503237648498445, "grad_norm": 6.500703797902379, "learning_rate": 9.853398983751809e-05, "loss": 1.2294, "step": 2060 }, { "epoch": 0.10554224239025137, "grad_norm": 6.481871800852735, "learning_rate": 9.851407599743636e-05, "loss": 1.224, "step": 
2070 }, { "epoch": 0.10605210829551828, "grad_norm": 8.157402438997709, "learning_rate": 9.849402985616894e-05, "loss": 1.2287, "step": 2080 }, { "epoch": 0.1065619742007852, "grad_norm": 7.468026418396616, "learning_rate": 9.847385146838304e-05, "loss": 1.2153, "step": 2090 }, { "epoch": 0.10707184010605211, "grad_norm": 7.796469259495127, "learning_rate": 9.845354088910648e-05, "loss": 1.3272, "step": 2100 }, { "epoch": 0.10758170601131903, "grad_norm": 6.931738313538142, "learning_rate": 9.843309817372761e-05, "loss": 1.201, "step": 2110 }, { "epoch": 0.10809157191658594, "grad_norm": 5.706345034804036, "learning_rate": 9.84125233779951e-05, "loss": 1.2303, "step": 2120 }, { "epoch": 0.10860143782185286, "grad_norm": 7.297092633764102, "learning_rate": 9.83918165580178e-05, "loss": 1.2992, "step": 2130 }, { "epoch": 0.10911130372711977, "grad_norm": 4.814644519151599, "learning_rate": 9.837097777026464e-05, "loss": 1.1572, "step": 2140 }, { "epoch": 0.10962116963238669, "grad_norm": 6.083844435532459, "learning_rate": 9.83500070715644e-05, "loss": 1.1952, "step": 2150 }, { "epoch": 0.11013103553765359, "grad_norm": 10.385157219191, "learning_rate": 9.83289045191056e-05, "loss": 1.2262, "step": 2160 }, { "epoch": 0.1106409014429205, "grad_norm": 5.819414632123794, "learning_rate": 9.830767017043635e-05, "loss": 1.2793, "step": 2170 }, { "epoch": 0.11115076734818742, "grad_norm": 5.843364388171365, "learning_rate": 9.828630408346415e-05, "loss": 1.2492, "step": 2180 }, { "epoch": 0.11166063325345434, "grad_norm": 7.036045897556753, "learning_rate": 9.826480631645578e-05, "loss": 1.2921, "step": 2190 }, { "epoch": 0.11217049915872125, "grad_norm": 11.830997847111623, "learning_rate": 9.824317692803711e-05, "loss": 1.2078, "step": 2200 }, { "epoch": 0.11268036506398817, "grad_norm": 8.258449793970065, "learning_rate": 9.822141597719294e-05, "loss": 1.1975, "step": 2210 }, { "epoch": 0.11319023096925508, "grad_norm": 7.478565780074843, "learning_rate": 9.819952352326689e-05, "loss": 1.2081, "step": 2220 }, { "epoch": 0.113700096874522, "grad_norm": 8.192782392249146, "learning_rate": 9.817749962596115e-05, "loss": 1.2213, "step": 2230 }, { "epoch": 0.11420996277978891, "grad_norm": 8.384820102991455, "learning_rate": 9.81553443453364e-05, "loss": 1.2237, "step": 2240 }, { "epoch": 0.11471982868505583, "grad_norm": 5.245831823269501, "learning_rate": 9.81330577418116e-05, "loss": 1.1818, "step": 2250 }, { "epoch": 0.11522969459032274, "grad_norm": 4.777949798758605, "learning_rate": 9.811063987616381e-05, "loss": 1.1329, "step": 2260 }, { "epoch": 0.11573956049558966, "grad_norm": 7.1330868631591295, "learning_rate": 9.80880908095281e-05, "loss": 1.2497, "step": 2270 }, { "epoch": 0.11624942640085657, "grad_norm": 6.643681935564405, "learning_rate": 9.806541060339729e-05, "loss": 1.2394, "step": 2280 }, { "epoch": 0.11675929230612349, "grad_norm": 5.145381571255945, "learning_rate": 9.804259931962188e-05, "loss": 1.187, "step": 2290 }, { "epoch": 0.1172691582113904, "grad_norm": 10.41803650650462, "learning_rate": 9.801965702040974e-05, "loss": 1.245, "step": 2300 }, { "epoch": 0.11777902411665732, "grad_norm": 5.4345422498522, "learning_rate": 9.799658376832612e-05, "loss": 1.2012, "step": 2310 }, { "epoch": 0.11828889002192423, "grad_norm": 5.334062865929928, "learning_rate": 9.797337962629334e-05, "loss": 1.1432, "step": 2320 }, { "epoch": 0.11879875592719115, "grad_norm": 6.099120103735033, "learning_rate": 9.795004465759065e-05, "loss": 1.2238, "step": 2330 }, { "epoch": 
0.11930862183245806, "grad_norm": 7.914833667111485, "learning_rate": 9.792657892585413e-05, "loss": 1.1527, "step": 2340 }, { "epoch": 0.11981848773772498, "grad_norm": 5.197372483584429, "learning_rate": 9.790298249507641e-05, "loss": 1.1935, "step": 2350 }, { "epoch": 0.1203283536429919, "grad_norm": 6.124732365741253, "learning_rate": 9.787925542960657e-05, "loss": 1.2419, "step": 2360 }, { "epoch": 0.12083821954825881, "grad_norm": 4.383681745236697, "learning_rate": 9.785539779414993e-05, "loss": 1.2306, "step": 2370 }, { "epoch": 0.12134808545352573, "grad_norm": 5.815760094144063, "learning_rate": 9.783140965376789e-05, "loss": 1.1445, "step": 2380 }, { "epoch": 0.12185795135879264, "grad_norm": 5.741541468529939, "learning_rate": 9.780729107387772e-05, "loss": 1.1961, "step": 2390 }, { "epoch": 0.12236781726405956, "grad_norm": 6.030456606909413, "learning_rate": 9.778304212025244e-05, "loss": 1.1887, "step": 2400 }, { "epoch": 0.12287768316932647, "grad_norm": 5.911071102496239, "learning_rate": 9.77586628590206e-05, "loss": 1.1322, "step": 2410 }, { "epoch": 0.12338754907459339, "grad_norm": 5.420205050934226, "learning_rate": 9.773415335666611e-05, "loss": 1.1774, "step": 2420 }, { "epoch": 0.1238974149798603, "grad_norm": 6.720049848717211, "learning_rate": 9.770951368002803e-05, "loss": 1.1409, "step": 2430 }, { "epoch": 0.12440728088512722, "grad_norm": 9.256546417665346, "learning_rate": 9.768474389630046e-05, "loss": 1.18, "step": 2440 }, { "epoch": 0.12491714679039413, "grad_norm": 7.111505210888634, "learning_rate": 9.765984407303226e-05, "loss": 1.2464, "step": 2450 }, { "epoch": 0.12542701269566103, "grad_norm": 4.694572996739052, "learning_rate": 9.763481427812696e-05, "loss": 1.2375, "step": 2460 }, { "epoch": 0.12593687860092795, "grad_norm": 4.4336348668679975, "learning_rate": 9.76096545798425e-05, "loss": 1.3042, "step": 2470 }, { "epoch": 0.12644674450619486, "grad_norm": 4.625989023877044, "learning_rate": 9.75843650467911e-05, "loss": 1.1504, "step": 2480 }, { "epoch": 0.12695661041146178, "grad_norm": 7.90799206426825, "learning_rate": 9.755894574793905e-05, "loss": 1.1228, "step": 2490 }, { "epoch": 0.1274664763167287, "grad_norm": 6.704492006114675, "learning_rate": 9.753339675260647e-05, "loss": 1.1919, "step": 2500 }, { "epoch": 0.1279763422219956, "grad_norm": 6.942236866139156, "learning_rate": 9.750771813046722e-05, "loss": 1.2088, "step": 2510 }, { "epoch": 0.12848620812726252, "grad_norm": 4.322875961018973, "learning_rate": 9.748190995154868e-05, "loss": 1.1658, "step": 2520 }, { "epoch": 0.12899607403252944, "grad_norm": 6.218605296975476, "learning_rate": 9.745597228623147e-05, "loss": 1.1418, "step": 2530 }, { "epoch": 0.12950593993779635, "grad_norm": 5.6305980267618185, "learning_rate": 9.74299052052494e-05, "loss": 1.1694, "step": 2540 }, { "epoch": 0.13001580584306327, "grad_norm": 10.313836811735246, "learning_rate": 9.740370877968916e-05, "loss": 1.1807, "step": 2550 }, { "epoch": 0.13052567174833019, "grad_norm": 7.647262183219847, "learning_rate": 9.737738308099017e-05, "loss": 1.1667, "step": 2560 }, { "epoch": 0.1310355376535971, "grad_norm": 5.2019357519805425, "learning_rate": 9.735092818094442e-05, "loss": 1.227, "step": 2570 }, { "epoch": 0.13154540355886402, "grad_norm": 5.765131994822294, "learning_rate": 9.732434415169623e-05, "loss": 1.1175, "step": 2580 }, { "epoch": 0.13205526946413093, "grad_norm": 4.071134055352691, "learning_rate": 9.729763106574202e-05, "loss": 1.0949, "step": 2590 }, { "epoch": 0.13256513536939785, 
"grad_norm": 7.880998877266062, "learning_rate": 9.727078899593023e-05, "loss": 1.165, "step": 2600 }, { "epoch": 0.13307500127466476, "grad_norm": 8.522815149585488, "learning_rate": 9.7243818015461e-05, "loss": 1.1394, "step": 2610 }, { "epoch": 0.13358486717993168, "grad_norm": 4.210313288723103, "learning_rate": 9.721671819788602e-05, "loss": 1.1522, "step": 2620 }, { "epoch": 0.1340947330851986, "grad_norm": 7.169891677668386, "learning_rate": 9.718948961710834e-05, "loss": 1.1513, "step": 2630 }, { "epoch": 0.1346045989904655, "grad_norm": 11.212825964523153, "learning_rate": 9.716213234738215e-05, "loss": 1.1749, "step": 2640 }, { "epoch": 0.13511446489573242, "grad_norm": 6.8565562989741355, "learning_rate": 9.713464646331259e-05, "loss": 1.2062, "step": 2650 }, { "epoch": 0.13562433080099934, "grad_norm": 8.87197126878053, "learning_rate": 9.710703203985554e-05, "loss": 1.1817, "step": 2660 }, { "epoch": 0.13613419670626625, "grad_norm": 5.93024626159216, "learning_rate": 9.707928915231742e-05, "loss": 1.1203, "step": 2670 }, { "epoch": 0.13664406261153317, "grad_norm": 5.249617830158715, "learning_rate": 9.705141787635493e-05, "loss": 1.1743, "step": 2680 }, { "epoch": 0.13715392851680008, "grad_norm": 4.986172758637716, "learning_rate": 9.702341828797497e-05, "loss": 1.1499, "step": 2690 }, { "epoch": 0.137663794422067, "grad_norm": 8.106695311484067, "learning_rate": 9.699529046353433e-05, "loss": 1.2356, "step": 2700 }, { "epoch": 0.1381736603273339, "grad_norm": 5.122080533809294, "learning_rate": 9.696703447973947e-05, "loss": 1.2141, "step": 2710 }, { "epoch": 0.13868352623260083, "grad_norm": 4.891418222378375, "learning_rate": 9.69386504136464e-05, "loss": 1.1277, "step": 2720 }, { "epoch": 0.13919339213786774, "grad_norm": 5.552791497724159, "learning_rate": 9.691013834266041e-05, "loss": 1.1838, "step": 2730 }, { "epoch": 0.13970325804313466, "grad_norm": 6.05967674916367, "learning_rate": 9.688149834453584e-05, "loss": 1.1281, "step": 2740 }, { "epoch": 0.14021312394840157, "grad_norm": 7.952542574514654, "learning_rate": 9.685273049737591e-05, "loss": 1.1558, "step": 2750 }, { "epoch": 0.1407229898536685, "grad_norm": 5.6955985155690945, "learning_rate": 9.682383487963249e-05, "loss": 1.1043, "step": 2760 }, { "epoch": 0.1412328557589354, "grad_norm": 6.693711350896108, "learning_rate": 9.679481157010593e-05, "loss": 1.1673, "step": 2770 }, { "epoch": 0.14174272166420232, "grad_norm": 5.052952434134367, "learning_rate": 9.676566064794473e-05, "loss": 1.21, "step": 2780 }, { "epoch": 0.14225258756946924, "grad_norm": 4.609882531018443, "learning_rate": 9.673638219264547e-05, "loss": 1.1938, "step": 2790 }, { "epoch": 0.14276245347473615, "grad_norm": 7.234811448945305, "learning_rate": 9.670697628405248e-05, "loss": 1.1709, "step": 2800 }, { "epoch": 0.14327231938000307, "grad_norm": 5.177929762439721, "learning_rate": 9.667744300235765e-05, "loss": 1.2318, "step": 2810 }, { "epoch": 0.14378218528526998, "grad_norm": 4.736379267462622, "learning_rate": 9.664778242810027e-05, "loss": 1.1854, "step": 2820 }, { "epoch": 0.1442920511905369, "grad_norm": 5.465873014267871, "learning_rate": 9.661799464216674e-05, "loss": 1.1519, "step": 2830 }, { "epoch": 0.1448019170958038, "grad_norm": 4.8161346948916774, "learning_rate": 9.658807972579039e-05, "loss": 1.1829, "step": 2840 }, { "epoch": 0.14531178300107073, "grad_norm": 4.313950922209564, "learning_rate": 9.655803776055122e-05, "loss": 1.1124, "step": 2850 }, { "epoch": 0.14582164890633764, "grad_norm": 7.772690723847887, 
"learning_rate": 9.65278688283757e-05, "loss": 1.2594, "step": 2860 }, { "epoch": 0.14633151481160456, "grad_norm": 6.400952780656834, "learning_rate": 9.649757301153658e-05, "loss": 1.1976, "step": 2870 }, { "epoch": 0.14684138071687147, "grad_norm": 4.26178174000709, "learning_rate": 9.646715039265262e-05, "loss": 1.1369, "step": 2880 }, { "epoch": 0.1473512466221384, "grad_norm": 8.065544184447527, "learning_rate": 9.643660105468834e-05, "loss": 1.1366, "step": 2890 }, { "epoch": 0.1478611125274053, "grad_norm": 4.624654737377227, "learning_rate": 9.640592508095389e-05, "loss": 1.1888, "step": 2900 }, { "epoch": 0.14837097843267222, "grad_norm": 6.4881922231671, "learning_rate": 9.637512255510475e-05, "loss": 1.2, "step": 2910 }, { "epoch": 0.14888084433793913, "grad_norm": 7.571117972742807, "learning_rate": 9.634419356114146e-05, "loss": 1.1813, "step": 2920 }, { "epoch": 0.14939071024320605, "grad_norm": 4.0399323719696, "learning_rate": 9.631313818340951e-05, "loss": 1.108, "step": 2930 }, { "epoch": 0.14990057614847296, "grad_norm": 6.80892247075497, "learning_rate": 9.628195650659905e-05, "loss": 1.1393, "step": 2940 }, { "epoch": 0.15041044205373988, "grad_norm": 6.6413351681887605, "learning_rate": 9.62506486157446e-05, "loss": 1.1306, "step": 2950 }, { "epoch": 0.1509203079590068, "grad_norm": 4.866599278982188, "learning_rate": 9.621921459622494e-05, "loss": 1.1209, "step": 2960 }, { "epoch": 0.1514301738642737, "grad_norm": 5.195315531928738, "learning_rate": 9.618765453376275e-05, "loss": 1.1449, "step": 2970 }, { "epoch": 0.15194003976954062, "grad_norm": 5.985983405099398, "learning_rate": 9.615596851442448e-05, "loss": 1.151, "step": 2980 }, { "epoch": 0.1524499056748075, "grad_norm": 5.216800512033471, "learning_rate": 9.612415662462007e-05, "loss": 1.1354, "step": 2990 }, { "epoch": 0.15295977158007443, "grad_norm": 4.485321159625428, "learning_rate": 9.609221895110269e-05, "loss": 1.132, "step": 3000 }, { "epoch": 0.15346963748534134, "grad_norm": 4.795030590130624, "learning_rate": 9.606015558096857e-05, "loss": 1.0816, "step": 3010 }, { "epoch": 0.15397950339060826, "grad_norm": 8.191449302146102, "learning_rate": 9.602796660165667e-05, "loss": 1.1905, "step": 3020 }, { "epoch": 0.15448936929587517, "grad_norm": 3.277563070999204, "learning_rate": 9.599565210094856e-05, "loss": 1.1425, "step": 3030 }, { "epoch": 0.1549992352011421, "grad_norm": 4.295030375268278, "learning_rate": 9.596321216696807e-05, "loss": 1.184, "step": 3040 }, { "epoch": 0.155509101106409, "grad_norm": 4.705769445658949, "learning_rate": 9.59306468881811e-05, "loss": 1.1924, "step": 3050 }, { "epoch": 0.15601896701167592, "grad_norm": 4.185085976052072, "learning_rate": 9.589795635339538e-05, "loss": 1.1721, "step": 3060 }, { "epoch": 0.15652883291694283, "grad_norm": 4.361491323328995, "learning_rate": 9.586514065176022e-05, "loss": 1.1392, "step": 3070 }, { "epoch": 0.15703869882220975, "grad_norm": 4.528614562993759, "learning_rate": 9.583219987276629e-05, "loss": 1.094, "step": 3080 }, { "epoch": 0.15754856472747666, "grad_norm": 4.961731041882474, "learning_rate": 9.579913410624532e-05, "loss": 1.2076, "step": 3090 }, { "epoch": 0.15805843063274358, "grad_norm": 4.757780522134779, "learning_rate": 9.576594344236991e-05, "loss": 1.1451, "step": 3100 }, { "epoch": 0.1585682965380105, "grad_norm": 6.711264664750607, "learning_rate": 9.573262797165326e-05, "loss": 1.0809, "step": 3110 }, { "epoch": 0.1590781624432774, "grad_norm": 5.286703097650971, "learning_rate": 9.56991877849489e-05, 
"loss": 1.1819, "step": 3120 }, { "epoch": 0.15958802834854433, "grad_norm": 5.540430875450444, "learning_rate": 9.566562297345054e-05, "loss": 1.2123, "step": 3130 }, { "epoch": 0.16009789425381124, "grad_norm": 5.935180682491463, "learning_rate": 9.563193362869167e-05, "loss": 1.1159, "step": 3140 }, { "epoch": 0.16060776015907816, "grad_norm": 6.423069613572788, "learning_rate": 9.559811984254545e-05, "loss": 1.1342, "step": 3150 }, { "epoch": 0.16111762606434507, "grad_norm": 6.070748932230466, "learning_rate": 9.556418170722435e-05, "loss": 1.2179, "step": 3160 }, { "epoch": 0.16162749196961199, "grad_norm": 4.35911724617304, "learning_rate": 9.553011931527998e-05, "loss": 1.083, "step": 3170 }, { "epoch": 0.1621373578748789, "grad_norm": 3.7577113585154183, "learning_rate": 9.54959327596028e-05, "loss": 1.2139, "step": 3180 }, { "epoch": 0.16264722378014582, "grad_norm": 4.7730345910622, "learning_rate": 9.546162213342187e-05, "loss": 1.1606, "step": 3190 }, { "epoch": 0.16315708968541273, "grad_norm": 7.156654490620455, "learning_rate": 9.542718753030463e-05, "loss": 1.1637, "step": 3200 }, { "epoch": 0.16366695559067965, "grad_norm": 5.566030323225834, "learning_rate": 9.539262904415655e-05, "loss": 1.1034, "step": 3210 }, { "epoch": 0.16417682149594656, "grad_norm": 8.746375921889477, "learning_rate": 9.5357946769221e-05, "loss": 1.1711, "step": 3220 }, { "epoch": 0.16468668740121348, "grad_norm": 4.325955014504265, "learning_rate": 9.532314080007888e-05, "loss": 1.0815, "step": 3230 }, { "epoch": 0.1651965533064804, "grad_norm": 4.771056313254683, "learning_rate": 9.528821123164845e-05, "loss": 1.0926, "step": 3240 }, { "epoch": 0.1657064192117473, "grad_norm": 8.066446506417845, "learning_rate": 9.525315815918501e-05, "loss": 1.0949, "step": 3250 }, { "epoch": 0.16621628511701422, "grad_norm": 5.073637609519421, "learning_rate": 9.521798167828069e-05, "loss": 1.1235, "step": 3260 }, { "epoch": 0.16672615102228114, "grad_norm": 4.7462509373604975, "learning_rate": 9.518268188486412e-05, "loss": 1.1382, "step": 3270 }, { "epoch": 0.16723601692754805, "grad_norm": 5.252783379643778, "learning_rate": 9.514725887520026e-05, "loss": 1.0399, "step": 3280 }, { "epoch": 0.16774588283281497, "grad_norm": 3.9308583855377264, "learning_rate": 9.511171274589006e-05, "loss": 1.1328, "step": 3290 }, { "epoch": 0.16825574873808188, "grad_norm": 4.791609600754416, "learning_rate": 9.507604359387024e-05, "loss": 1.1363, "step": 3300 }, { "epoch": 0.1687656146433488, "grad_norm": 4.468070449560343, "learning_rate": 9.504025151641299e-05, "loss": 1.159, "step": 3310 }, { "epoch": 0.16927548054861571, "grad_norm": 5.861341903814554, "learning_rate": 9.500433661112575e-05, "loss": 1.1007, "step": 3320 }, { "epoch": 0.16978534645388263, "grad_norm": 4.483499400910527, "learning_rate": 9.496829897595089e-05, "loss": 1.1221, "step": 3330 }, { "epoch": 0.17029521235914954, "grad_norm": 5.074741849785658, "learning_rate": 9.493213870916552e-05, "loss": 1.1113, "step": 3340 }, { "epoch": 0.17080507826441646, "grad_norm": 3.9786022995626213, "learning_rate": 9.489585590938116e-05, "loss": 1.1238, "step": 3350 }, { "epoch": 0.17131494416968338, "grad_norm": 5.21394443419945, "learning_rate": 9.485945067554344e-05, "loss": 1.1357, "step": 3360 }, { "epoch": 0.1718248100749503, "grad_norm": 4.142795672158372, "learning_rate": 9.482292310693191e-05, "loss": 1.1089, "step": 3370 }, { "epoch": 0.1723346759802172, "grad_norm": 3.8294513054176362, "learning_rate": 9.478627330315975e-05, "loss": 1.1337, "step": 3380 
}, { "epoch": 0.17284454188548412, "grad_norm": 3.1799263389534675, "learning_rate": 9.474950136417347e-05, "loss": 1.1452, "step": 3390 }, { "epoch": 0.17335440779075104, "grad_norm": 4.8079087991100735, "learning_rate": 9.471260739025263e-05, "loss": 1.1363, "step": 3400 }, { "epoch": 0.17386427369601795, "grad_norm": 4.781900661326968, "learning_rate": 9.467559148200964e-05, "loss": 1.0781, "step": 3410 }, { "epoch": 0.17437413960128487, "grad_norm": 3.994191358886494, "learning_rate": 9.463845374038935e-05, "loss": 1.1218, "step": 3420 }, { "epoch": 0.17488400550655178, "grad_norm": 4.75943478869446, "learning_rate": 9.460119426666895e-05, "loss": 1.1882, "step": 3430 }, { "epoch": 0.1753938714118187, "grad_norm": 5.6089661449576305, "learning_rate": 9.456381316245756e-05, "loss": 1.0581, "step": 3440 }, { "epoch": 0.1759037373170856, "grad_norm": 7.583108949043354, "learning_rate": 9.452631052969597e-05, "loss": 1.0792, "step": 3450 }, { "epoch": 0.17641360322235253, "grad_norm": 6.1835180845992594, "learning_rate": 9.448868647065642e-05, "loss": 1.0849, "step": 3460 }, { "epoch": 0.17692346912761944, "grad_norm": 4.488568933309487, "learning_rate": 9.445094108794228e-05, "loss": 1.1, "step": 3470 }, { "epoch": 0.17743333503288636, "grad_norm": 6.296838767135793, "learning_rate": 9.441307448448778e-05, "loss": 1.1232, "step": 3480 }, { "epoch": 0.17794320093815327, "grad_norm": 6.106789989120955, "learning_rate": 9.437508676355773e-05, "loss": 1.0666, "step": 3490 }, { "epoch": 0.1784530668434202, "grad_norm": 4.830440619495212, "learning_rate": 9.433697802874721e-05, "loss": 1.136, "step": 3500 }, { "epoch": 0.1789629327486871, "grad_norm": 4.582083565033262, "learning_rate": 9.429874838398134e-05, "loss": 1.1087, "step": 3510 }, { "epoch": 0.17947279865395402, "grad_norm": 4.4505207891650285, "learning_rate": 9.426039793351498e-05, "loss": 1.2229, "step": 3520 }, { "epoch": 0.17998266455922093, "grad_norm": 4.073833569670933, "learning_rate": 9.422192678193238e-05, "loss": 1.1443, "step": 3530 }, { "epoch": 0.18049253046448785, "grad_norm": 6.174973106473309, "learning_rate": 9.418333503414701e-05, "loss": 1.1453, "step": 3540 }, { "epoch": 0.18100239636975476, "grad_norm": 6.57859848265449, "learning_rate": 9.41446227954012e-05, "loss": 1.1038, "step": 3550 }, { "epoch": 0.18151226227502168, "grad_norm": 8.479133870904418, "learning_rate": 9.410579017126586e-05, "loss": 1.1265, "step": 3560 }, { "epoch": 0.1820221281802886, "grad_norm": 4.42253230283525, "learning_rate": 9.406683726764018e-05, "loss": 1.1098, "step": 3570 }, { "epoch": 0.1825319940855555, "grad_norm": 3.829827815809406, "learning_rate": 9.402776419075138e-05, "loss": 1.1131, "step": 3580 }, { "epoch": 0.18304185999082243, "grad_norm": 7.4706313494248375, "learning_rate": 9.398857104715441e-05, "loss": 1.1661, "step": 3590 }, { "epoch": 0.18355172589608934, "grad_norm": 5.157105013968114, "learning_rate": 9.394925794373162e-05, "loss": 1.0779, "step": 3600 }, { "epoch": 0.18406159180135626, "grad_norm": 5.7160746841889924, "learning_rate": 9.390982498769254e-05, "loss": 1.1154, "step": 3610 }, { "epoch": 0.18457145770662317, "grad_norm": 4.263780490016166, "learning_rate": 9.387027228657352e-05, "loss": 1.1213, "step": 3620 }, { "epoch": 0.1850813236118901, "grad_norm": 5.974524640438817, "learning_rate": 9.383059994823747e-05, "loss": 1.1598, "step": 3630 }, { "epoch": 0.185591189517157, "grad_norm": 3.4619650322325444, "learning_rate": 9.379080808087353e-05, "loss": 1.1139, "step": 3640 }, { "epoch": 
0.1861010554224239, "grad_norm": 4.944482287429536, "learning_rate": 9.375089679299684e-05, "loss": 1.1143, "step": 3650 }, { "epoch": 0.1866109213276908, "grad_norm": 4.173778226175053, "learning_rate": 9.371086619344822e-05, "loss": 1.0866, "step": 3660 }, { "epoch": 0.18712078723295772, "grad_norm": 5.4238742287923705, "learning_rate": 9.367071639139381e-05, "loss": 1.2034, "step": 3670 }, { "epoch": 0.18763065313822463, "grad_norm": 3.7178235508537174, "learning_rate": 9.363044749632488e-05, "loss": 1.0685, "step": 3680 }, { "epoch": 0.18814051904349155, "grad_norm": 7.491473532606374, "learning_rate": 9.359005961805743e-05, "loss": 1.0924, "step": 3690 }, { "epoch": 0.18865038494875846, "grad_norm": 4.245239185493443, "learning_rate": 9.354955286673192e-05, "loss": 1.0636, "step": 3700 }, { "epoch": 0.18916025085402538, "grad_norm": 5.707722548518677, "learning_rate": 9.350892735281308e-05, "loss": 1.113, "step": 3710 }, { "epoch": 0.1896701167592923, "grad_norm": 6.702902819523267, "learning_rate": 9.34681831870894e-05, "loss": 1.1233, "step": 3720 }, { "epoch": 0.1901799826645592, "grad_norm": 5.440354574023026, "learning_rate": 9.342732048067302e-05, "loss": 1.0976, "step": 3730 }, { "epoch": 0.19068984856982613, "grad_norm": 5.034338134908323, "learning_rate": 9.338633934499932e-05, "loss": 1.1727, "step": 3740 }, { "epoch": 0.19119971447509304, "grad_norm": 5.32930802456178, "learning_rate": 9.334523989182661e-05, "loss": 1.0973, "step": 3750 }, { "epoch": 0.19170958038035996, "grad_norm": 4.523880335546063, "learning_rate": 9.330402223323595e-05, "loss": 1.1695, "step": 3760 }, { "epoch": 0.19221944628562687, "grad_norm": 3.9403859738446796, "learning_rate": 9.326268648163064e-05, "loss": 1.1134, "step": 3770 }, { "epoch": 0.1927293121908938, "grad_norm": 3.3662823656403495, "learning_rate": 9.322123274973613e-05, "loss": 1.1634, "step": 3780 }, { "epoch": 0.1932391780961607, "grad_norm": 3.029874138789195, "learning_rate": 9.317966115059956e-05, "loss": 1.0194, "step": 3790 }, { "epoch": 0.19374904400142762, "grad_norm": 4.226741304538432, "learning_rate": 9.31379717975895e-05, "loss": 1.0669, "step": 3800 }, { "epoch": 0.19425890990669453, "grad_norm": 3.4250576879529993, "learning_rate": 9.309616480439564e-05, "loss": 1.1222, "step": 3810 }, { "epoch": 0.19476877581196145, "grad_norm": 4.618185836785933, "learning_rate": 9.305424028502852e-05, "loss": 1.0713, "step": 3820 }, { "epoch": 0.19527864171722836, "grad_norm": 3.3429157735662294, "learning_rate": 9.301219835381914e-05, "loss": 1.0282, "step": 3830 }, { "epoch": 0.19578850762249528, "grad_norm": 4.15707318845732, "learning_rate": 9.297003912541871e-05, "loss": 1.1, "step": 3840 }, { "epoch": 0.1962983735277622, "grad_norm": 4.657258148198954, "learning_rate": 9.29277627147983e-05, "loss": 1.0841, "step": 3850 }, { "epoch": 0.1968082394330291, "grad_norm": 4.056316857136649, "learning_rate": 9.28853692372486e-05, "loss": 1.0607, "step": 3860 }, { "epoch": 0.19731810533829602, "grad_norm": 3.580812269523134, "learning_rate": 9.284285880837946e-05, "loss": 1.1802, "step": 3870 }, { "epoch": 0.19782797124356294, "grad_norm": 4.750882133120819, "learning_rate": 9.280023154411974e-05, "loss": 1.1562, "step": 3880 }, { "epoch": 0.19833783714882985, "grad_norm": 4.351795862653647, "learning_rate": 9.275748756071689e-05, "loss": 1.1345, "step": 3890 }, { "epoch": 0.19884770305409677, "grad_norm": 4.370634001939076, "learning_rate": 9.271462697473665e-05, "loss": 1.1158, "step": 3900 }, { "epoch": 0.19935756895936368, 
"grad_norm": 4.593193453141612, "learning_rate": 9.267164990306274e-05, "loss": 1.1004, "step": 3910 }, { "epoch": 0.1998674348646306, "grad_norm": 5.0217760322668035, "learning_rate": 9.262855646289659e-05, "loss": 1.0957, "step": 3920 }, { "epoch": 0.20037730076989752, "grad_norm": 4.414745461532738, "learning_rate": 9.258534677175692e-05, "loss": 1.1278, "step": 3930 }, { "epoch": 0.20088716667516443, "grad_norm": 7.850860756941811, "learning_rate": 9.254202094747952e-05, "loss": 1.2159, "step": 3940 }, { "epoch": 0.20139703258043135, "grad_norm": 4.576300386213128, "learning_rate": 9.249857910821682e-05, "loss": 1.053, "step": 3950 }, { "epoch": 0.20190689848569826, "grad_norm": 5.669850587852958, "learning_rate": 9.245502137243771e-05, "loss": 1.0737, "step": 3960 }, { "epoch": 0.20241676439096518, "grad_norm": 3.841790288194421, "learning_rate": 9.24113478589271e-05, "loss": 1.084, "step": 3970 }, { "epoch": 0.2029266302962321, "grad_norm": 4.5287085004351235, "learning_rate": 9.236755868678559e-05, "loss": 1.1155, "step": 3980 }, { "epoch": 0.203436496201499, "grad_norm": 4.586885578600499, "learning_rate": 9.232365397542926e-05, "loss": 1.0752, "step": 3990 }, { "epoch": 0.20394636210676592, "grad_norm": 9.096774082551118, "learning_rate": 9.227963384458924e-05, "loss": 1.0771, "step": 4000 }, { "epoch": 0.20445622801203284, "grad_norm": 6.091396218918115, "learning_rate": 9.223549841431144e-05, "loss": 1.1101, "step": 4010 }, { "epoch": 0.20496609391729975, "grad_norm": 6.385041154765648, "learning_rate": 9.219124780495616e-05, "loss": 1.11, "step": 4020 }, { "epoch": 0.20547595982256667, "grad_norm": 3.497672542439059, "learning_rate": 9.214688213719782e-05, "loss": 1.1167, "step": 4030 }, { "epoch": 0.20598582572783358, "grad_norm": 6.632733925092718, "learning_rate": 9.210240153202462e-05, "loss": 1.1654, "step": 4040 }, { "epoch": 0.2064956916331005, "grad_norm": 5.1979095263524, "learning_rate": 9.205780611073822e-05, "loss": 1.1256, "step": 4050 }, { "epoch": 0.2070055575383674, "grad_norm": 4.725662413664164, "learning_rate": 9.201309599495333e-05, "loss": 1.1613, "step": 4060 }, { "epoch": 0.20751542344363433, "grad_norm": 4.198446410312713, "learning_rate": 9.19682713065975e-05, "loss": 1.1072, "step": 4070 }, { "epoch": 0.20802528934890124, "grad_norm": 5.632096006878919, "learning_rate": 9.19233321679107e-05, "loss": 1.0694, "step": 4080 }, { "epoch": 0.20853515525416816, "grad_norm": 5.15508852329062, "learning_rate": 9.187827870144501e-05, "loss": 1.1042, "step": 4090 }, { "epoch": 0.20904502115943507, "grad_norm": 3.768349793802397, "learning_rate": 9.18331110300643e-05, "loss": 1.0987, "step": 4100 }, { "epoch": 0.209554887064702, "grad_norm": 4.299188815003413, "learning_rate": 9.178782927694389e-05, "loss": 1.1768, "step": 4110 }, { "epoch": 0.2100647529699689, "grad_norm": 3.8096689734152287, "learning_rate": 9.17424335655702e-05, "loss": 1.1042, "step": 4120 }, { "epoch": 0.21057461887523582, "grad_norm": 5.602085196747135, "learning_rate": 9.169692401974043e-05, "loss": 1.1145, "step": 4130 }, { "epoch": 0.21108448478050273, "grad_norm": 4.969191904156465, "learning_rate": 9.165130076356219e-05, "loss": 1.1362, "step": 4140 }, { "epoch": 0.21159435068576965, "grad_norm": 6.843019217097579, "learning_rate": 9.160556392145321e-05, "loss": 1.1221, "step": 4150 }, { "epoch": 0.21210421659103657, "grad_norm": 3.499758917655891, "learning_rate": 9.155971361814097e-05, "loss": 1.0889, "step": 4160 }, { "epoch": 0.21261408249630348, "grad_norm": 4.621635526066649, 
"learning_rate": 9.151374997866237e-05, "loss": 1.1394, "step": 4170 }, { "epoch": 0.2131239484015704, "grad_norm": 4.414049418850473, "learning_rate": 9.146767312836339e-05, "loss": 1.1475, "step": 4180 }, { "epoch": 0.2136338143068373, "grad_norm": 4.975474445122677, "learning_rate": 9.142148319289872e-05, "loss": 1.0906, "step": 4190 }, { "epoch": 0.21414368021210423, "grad_norm": 4.470228851354492, "learning_rate": 9.137518029823146e-05, "loss": 1.0443, "step": 4200 }, { "epoch": 0.21465354611737114, "grad_norm": 4.023509264580457, "learning_rate": 9.132876457063275e-05, "loss": 1.1492, "step": 4210 }, { "epoch": 0.21516341202263806, "grad_norm": 4.467631871018525, "learning_rate": 9.128223613668145e-05, "loss": 1.0418, "step": 4220 }, { "epoch": 0.21567327792790497, "grad_norm": 3.7463610188428262, "learning_rate": 9.123559512326376e-05, "loss": 1.116, "step": 4230 }, { "epoch": 0.2161831438331719, "grad_norm": 6.753094731554434, "learning_rate": 9.118884165757288e-05, "loss": 1.0903, "step": 4240 }, { "epoch": 0.2166930097384388, "grad_norm": 6.939143231788462, "learning_rate": 9.114197586710873e-05, "loss": 1.0139, "step": 4250 }, { "epoch": 0.21720287564370572, "grad_norm": 3.7876691852546465, "learning_rate": 9.109499787967747e-05, "loss": 1.0381, "step": 4260 }, { "epoch": 0.21771274154897263, "grad_norm": 6.277095714725083, "learning_rate": 9.104790782339127e-05, "loss": 1.0978, "step": 4270 }, { "epoch": 0.21822260745423955, "grad_norm": 4.096173329213358, "learning_rate": 9.100070582666795e-05, "loss": 1.0067, "step": 4280 }, { "epoch": 0.21873247335950646, "grad_norm": 3.6961770600795623, "learning_rate": 9.095339201823055e-05, "loss": 1.1198, "step": 4290 }, { "epoch": 0.21924233926477338, "grad_norm": 5.972241108451364, "learning_rate": 9.090596652710702e-05, "loss": 1.1016, "step": 4300 }, { "epoch": 0.21975220517004027, "grad_norm": 4.313135247453813, "learning_rate": 9.085842948262992e-05, "loss": 1.1174, "step": 4310 }, { "epoch": 0.22026207107530718, "grad_norm": 4.234029096400693, "learning_rate": 9.081078101443602e-05, "loss": 1.1158, "step": 4320 }, { "epoch": 0.2207719369805741, "grad_norm": 4.492757703720211, "learning_rate": 9.076302125246592e-05, "loss": 1.0193, "step": 4330 }, { "epoch": 0.221281802885841, "grad_norm": 4.640881532623444, "learning_rate": 9.071515032696371e-05, "loss": 1.087, "step": 4340 }, { "epoch": 0.22179166879110793, "grad_norm": 3.617787752452594, "learning_rate": 9.066716836847673e-05, "loss": 1.058, "step": 4350 }, { "epoch": 0.22230153469637484, "grad_norm": 3.6014749404530466, "learning_rate": 9.061907550785498e-05, "loss": 1.07, "step": 4360 }, { "epoch": 0.22281140060164176, "grad_norm": 6.001862960183884, "learning_rate": 9.0570871876251e-05, "loss": 1.1156, "step": 4370 }, { "epoch": 0.22332126650690867, "grad_norm": 5.2487018233747955, "learning_rate": 9.052255760511934e-05, "loss": 1.0502, "step": 4380 }, { "epoch": 0.2238311324121756, "grad_norm": 3.1087029192988567, "learning_rate": 9.047413282621634e-05, "loss": 1.0547, "step": 4390 }, { "epoch": 0.2243409983174425, "grad_norm": 5.631068168508301, "learning_rate": 9.042559767159965e-05, "loss": 1.0321, "step": 4400 }, { "epoch": 0.22485086422270942, "grad_norm": 5.674692322000431, "learning_rate": 9.037695227362793e-05, "loss": 1.1467, "step": 4410 }, { "epoch": 0.22536073012797633, "grad_norm": 3.7896395368056934, "learning_rate": 9.032819676496052e-05, "loss": 1.113, "step": 4420 }, { "epoch": 0.22587059603324325, "grad_norm": 4.087460278114435, "learning_rate": 
9.027933127855696e-05, "loss": 1.0775, "step": 4430 }, { "epoch": 0.22638046193851016, "grad_norm": 4.69842077796759, "learning_rate": 9.02303559476768e-05, "loss": 1.1074, "step": 4440 }, { "epoch": 0.22689032784377708, "grad_norm": 6.276622569586819, "learning_rate": 9.018127090587908e-05, "loss": 1.0951, "step": 4450 }, { "epoch": 0.227400193749044, "grad_norm": 5.388702778493809, "learning_rate": 9.013207628702205e-05, "loss": 1.0386, "step": 4460 }, { "epoch": 0.2279100596543109, "grad_norm": 5.487381906765069, "learning_rate": 9.008277222526278e-05, "loss": 1.0894, "step": 4470 }, { "epoch": 0.22841992555957782, "grad_norm": 3.023066526208634, "learning_rate": 9.003335885505682e-05, "loss": 1.0202, "step": 4480 }, { "epoch": 0.22892979146484474, "grad_norm": 4.6517471037198455, "learning_rate": 8.998383631115776e-05, "loss": 1.1101, "step": 4490 }, { "epoch": 0.22943965737011165, "grad_norm": 6.349629107082182, "learning_rate": 8.993420472861695e-05, "loss": 1.0721, "step": 4500 }, { "epoch": 0.22994952327537857, "grad_norm": 4.227187789476838, "learning_rate": 8.988446424278313e-05, "loss": 1.0836, "step": 4510 }, { "epoch": 0.23045938918064549, "grad_norm": 3.7667451821363094, "learning_rate": 8.983461498930195e-05, "loss": 1.0098, "step": 4520 }, { "epoch": 0.2309692550859124, "grad_norm": 5.61529724819574, "learning_rate": 8.978465710411574e-05, "loss": 1.122, "step": 4530 }, { "epoch": 0.23147912099117932, "grad_norm": 3.950252286659022, "learning_rate": 8.973459072346302e-05, "loss": 1.1244, "step": 4540 }, { "epoch": 0.23198898689644623, "grad_norm": 4.761507043362676, "learning_rate": 8.968441598387826e-05, "loss": 1.0686, "step": 4550 }, { "epoch": 0.23249885280171315, "grad_norm": 4.872182191461587, "learning_rate": 8.963413302219134e-05, "loss": 1.0078, "step": 4560 }, { "epoch": 0.23300871870698006, "grad_norm": 4.086834664724914, "learning_rate": 8.958374197552736e-05, "loss": 1.0911, "step": 4570 }, { "epoch": 0.23351858461224698, "grad_norm": 4.574015800523239, "learning_rate": 8.953324298130611e-05, "loss": 1.0988, "step": 4580 }, { "epoch": 0.2340284505175139, "grad_norm": 5.431453651680167, "learning_rate": 8.948263617724178e-05, "loss": 1.1378, "step": 4590 }, { "epoch": 0.2345383164227808, "grad_norm": 5.759183386680134, "learning_rate": 8.943192170134259e-05, "loss": 1.0764, "step": 4600 }, { "epoch": 0.23504818232804772, "grad_norm": 4.098486938090133, "learning_rate": 8.938109969191033e-05, "loss": 1.0611, "step": 4610 }, { "epoch": 0.23555804823331464, "grad_norm": 4.528706455199365, "learning_rate": 8.933017028754012e-05, "loss": 1.0934, "step": 4620 }, { "epoch": 0.23606791413858155, "grad_norm": 3.698168441687973, "learning_rate": 8.927913362711986e-05, "loss": 1.0981, "step": 4630 }, { "epoch": 0.23657778004384847, "grad_norm": 3.3076454363441115, "learning_rate": 8.922798984983005e-05, "loss": 1.0675, "step": 4640 }, { "epoch": 0.23708764594911538, "grad_norm": 4.7960174884936135, "learning_rate": 8.917673909514322e-05, "loss": 1.0734, "step": 4650 }, { "epoch": 0.2375975118543823, "grad_norm": 3.7657104564262904, "learning_rate": 8.912538150282366e-05, "loss": 1.0388, "step": 4660 }, { "epoch": 0.2381073777596492, "grad_norm": 2.735618317371669, "learning_rate": 8.907391721292702e-05, "loss": 1.0711, "step": 4670 }, { "epoch": 0.23861724366491613, "grad_norm": 3.9986942055432038, "learning_rate": 8.902234636579991e-05, "loss": 1.0441, "step": 4680 }, { "epoch": 0.23912710957018304, "grad_norm": 3.674282337978147, "learning_rate": 8.897066910207958e-05, 
"loss": 1.0557, "step": 4690 }, { "epoch": 0.23963697547544996, "grad_norm": 4.851417536326856, "learning_rate": 8.891888556269341e-05, "loss": 1.1019, "step": 4700 }, { "epoch": 0.24014684138071687, "grad_norm": 4.7533176487328115, "learning_rate": 8.886699588885866e-05, "loss": 1.0392, "step": 4710 }, { "epoch": 0.2406567072859838, "grad_norm": 3.406931358071219, "learning_rate": 8.881500022208195e-05, "loss": 1.019, "step": 4720 }, { "epoch": 0.2411665731912507, "grad_norm": 4.6383038465664885, "learning_rate": 8.876289870415905e-05, "loss": 1.0591, "step": 4730 }, { "epoch": 0.24167643909651762, "grad_norm": 3.4766136409230826, "learning_rate": 8.871069147717433e-05, "loss": 1.066, "step": 4740 }, { "epoch": 0.24218630500178454, "grad_norm": 3.583770256833579, "learning_rate": 8.865837868350045e-05, "loss": 1.0767, "step": 4750 }, { "epoch": 0.24269617090705145, "grad_norm": 4.096054842096785, "learning_rate": 8.860596046579794e-05, "loss": 1.0413, "step": 4760 }, { "epoch": 0.24320603681231837, "grad_norm": 4.604217588651983, "learning_rate": 8.855343696701488e-05, "loss": 1.0441, "step": 4770 }, { "epoch": 0.24371590271758528, "grad_norm": 2.3345138838960917, "learning_rate": 8.850080833038639e-05, "loss": 1.0091, "step": 4780 }, { "epoch": 0.2442257686228522, "grad_norm": 5.045281983640511, "learning_rate": 8.844807469943436e-05, "loss": 1.0512, "step": 4790 }, { "epoch": 0.2447356345281191, "grad_norm": 3.4826243080765806, "learning_rate": 8.839523621796699e-05, "loss": 1.0268, "step": 4800 }, { "epoch": 0.24524550043338603, "grad_norm": 6.338716603686131, "learning_rate": 8.834229303007841e-05, "loss": 1.0843, "step": 4810 }, { "epoch": 0.24575536633865294, "grad_norm": 3.255193678645787, "learning_rate": 8.828924528014828e-05, "loss": 1.1201, "step": 4820 }, { "epoch": 0.24626523224391986, "grad_norm": 3.1881589977771676, "learning_rate": 8.823609311284144e-05, "loss": 1.0373, "step": 4830 }, { "epoch": 0.24677509814918677, "grad_norm": 3.3439325019724633, "learning_rate": 8.81828366731075e-05, "loss": 1.0318, "step": 4840 }, { "epoch": 0.2472849640544537, "grad_norm": 3.719968743558736, "learning_rate": 8.812947610618031e-05, "loss": 1.1296, "step": 4850 }, { "epoch": 0.2477948299597206, "grad_norm": 3.3797188609486106, "learning_rate": 8.807601155757784e-05, "loss": 1.0437, "step": 4860 }, { "epoch": 0.24830469586498752, "grad_norm": 4.846704945951428, "learning_rate": 8.802244317310152e-05, "loss": 1.0502, "step": 4870 }, { "epoch": 0.24881456177025443, "grad_norm": 4.1767897486800365, "learning_rate": 8.796877109883598e-05, "loss": 1.0671, "step": 4880 }, { "epoch": 0.24932442767552135, "grad_norm": 2.971554217856899, "learning_rate": 8.791499548114863e-05, "loss": 1.1219, "step": 4890 }, { "epoch": 0.24983429358078826, "grad_norm": 5.2119781559216225, "learning_rate": 8.786111646668922e-05, "loss": 1.0953, "step": 4900 }, { "epoch": 0.25034415948605515, "grad_norm": 3.681987846013078, "learning_rate": 8.780713420238951e-05, "loss": 1.1017, "step": 4910 }, { "epoch": 0.25085402539132207, "grad_norm": 3.0278906655702302, "learning_rate": 8.775304883546279e-05, "loss": 1.0775, "step": 4920 }, { "epoch": 0.251363891296589, "grad_norm": 3.9195420892561255, "learning_rate": 8.769886051340353e-05, "loss": 1.0148, "step": 4930 }, { "epoch": 0.2518737572018559, "grad_norm": 3.565581280322952, "learning_rate": 8.7644569383987e-05, "loss": 1.0843, "step": 4940 }, { "epoch": 0.2523836231071228, "grad_norm": 5.761206813191621, "learning_rate": 8.759017559526876e-05, "loss": 1.1769, 
"step": 4950 }, { "epoch": 0.2528934890123897, "grad_norm": 3.1500014104883083, "learning_rate": 8.753567929558442e-05, "loss": 1.0727, "step": 4960 }, { "epoch": 0.25340335491765664, "grad_norm": 2.5769313666728144, "learning_rate": 8.748108063354906e-05, "loss": 1.034, "step": 4970 }, { "epoch": 0.25391322082292356, "grad_norm": 3.333945738470728, "learning_rate": 8.742637975805696e-05, "loss": 1.005, "step": 4980 }, { "epoch": 0.2544230867281905, "grad_norm": 3.536064757562939, "learning_rate": 8.737157681828112e-05, "loss": 1.0771, "step": 4990 }, { "epoch": 0.2549329526334574, "grad_norm": 4.126238329507708, "learning_rate": 8.73166719636729e-05, "loss": 1.0809, "step": 5000 }, { "epoch": 0.2554428185387243, "grad_norm": 4.040213145664409, "learning_rate": 8.726166534396157e-05, "loss": 1.0629, "step": 5010 }, { "epoch": 0.2559526844439912, "grad_norm": 4.570041080267456, "learning_rate": 8.720655710915393e-05, "loss": 1.0259, "step": 5020 }, { "epoch": 0.25646255034925813, "grad_norm": 4.423642343969428, "learning_rate": 8.715134740953386e-05, "loss": 1.0394, "step": 5030 }, { "epoch": 0.25697241625452505, "grad_norm": 2.7536372448154847, "learning_rate": 8.709603639566198e-05, "loss": 0.9952, "step": 5040 }, { "epoch": 0.25748228215979196, "grad_norm": 4.224827117877528, "learning_rate": 8.70406242183752e-05, "loss": 0.988, "step": 5050 }, { "epoch": 0.2579921480650589, "grad_norm": 4.509067034941248, "learning_rate": 8.698511102878628e-05, "loss": 1.0695, "step": 5060 }, { "epoch": 0.2585020139703258, "grad_norm": 5.9059767638098055, "learning_rate": 8.692949697828347e-05, "loss": 1.136, "step": 5070 }, { "epoch": 0.2590118798755927, "grad_norm": 3.916813048323415, "learning_rate": 8.687378221853008e-05, "loss": 1.1027, "step": 5080 }, { "epoch": 0.2595217457808596, "grad_norm": 4.494249905043309, "learning_rate": 8.681796690146404e-05, "loss": 1.1143, "step": 5090 }, { "epoch": 0.26003161168612654, "grad_norm": 4.444678199515402, "learning_rate": 8.676205117929752e-05, "loss": 1.1281, "step": 5100 }, { "epoch": 0.26054147759139346, "grad_norm": 5.7616406732136545, "learning_rate": 8.670603520451647e-05, "loss": 1.0357, "step": 5110 }, { "epoch": 0.26105134349666037, "grad_norm": 6.000264920560982, "learning_rate": 8.664991912988032e-05, "loss": 1.0236, "step": 5120 }, { "epoch": 0.2615612094019273, "grad_norm": 5.089299141780765, "learning_rate": 8.659370310842138e-05, "loss": 1.0294, "step": 5130 }, { "epoch": 0.2620710753071942, "grad_norm": 3.292913493702765, "learning_rate": 8.653738729344458e-05, "loss": 1.0661, "step": 5140 }, { "epoch": 0.2625809412124611, "grad_norm": 5.636099103217268, "learning_rate": 8.6480971838527e-05, "loss": 1.0012, "step": 5150 }, { "epoch": 0.26309080711772803, "grad_norm": 3.8951872942749413, "learning_rate": 8.642445689751736e-05, "loss": 1.0319, "step": 5160 }, { "epoch": 0.26360067302299495, "grad_norm": 5.6091029812361874, "learning_rate": 8.636784262453583e-05, "loss": 1.0849, "step": 5170 }, { "epoch": 0.26411053892826186, "grad_norm": 3.3948105111391884, "learning_rate": 8.631112917397331e-05, "loss": 1.0122, "step": 5180 }, { "epoch": 0.2646204048335288, "grad_norm": 4.348173789492771, "learning_rate": 8.625431670049129e-05, "loss": 1.059, "step": 5190 }, { "epoch": 0.2651302707387957, "grad_norm": 4.339709846251426, "learning_rate": 8.619740535902123e-05, "loss": 1.1468, "step": 5200 }, { "epoch": 0.2656401366440626, "grad_norm": 3.115045788546426, "learning_rate": 8.614039530476421e-05, "loss": 1.0592, "step": 5210 }, { "epoch": 
0.2661500025493295, "grad_norm": 4.095364624809172, "learning_rate": 8.608328669319057e-05, "loss": 0.9942, "step": 5220 }, { "epoch": 0.26665986845459644, "grad_norm": 3.3320331932956524, "learning_rate": 8.602607968003935e-05, "loss": 1.0521, "step": 5230 }, { "epoch": 0.26716973435986335, "grad_norm": 4.433829696781261, "learning_rate": 8.596877442131798e-05, "loss": 1.0545, "step": 5240 }, { "epoch": 0.26767960026513027, "grad_norm": 5.22376798696404, "learning_rate": 8.591137107330178e-05, "loss": 1.0657, "step": 5250 }, { "epoch": 0.2681894661703972, "grad_norm": 3.96131667739153, "learning_rate": 8.58538697925336e-05, "loss": 1.0586, "step": 5260 }, { "epoch": 0.2686993320756641, "grad_norm": 3.076997333578663, "learning_rate": 8.579627073582334e-05, "loss": 0.9829, "step": 5270 }, { "epoch": 0.269209197980931, "grad_norm": 5.1691240713000575, "learning_rate": 8.573857406024756e-05, "loss": 1.1053, "step": 5280 }, { "epoch": 0.26971906388619793, "grad_norm": 4.846399103707031, "learning_rate": 8.568077992314902e-05, "loss": 1.0745, "step": 5290 }, { "epoch": 0.27022892979146484, "grad_norm": 3.8635471595242543, "learning_rate": 8.562288848213623e-05, "loss": 1.0291, "step": 5300 }, { "epoch": 0.27073879569673176, "grad_norm": 3.0295588380453613, "learning_rate": 8.55648998950831e-05, "loss": 1.0129, "step": 5310 }, { "epoch": 0.2712486616019987, "grad_norm": 2.878473080072395, "learning_rate": 8.550681432012848e-05, "loss": 0.9553, "step": 5320 }, { "epoch": 0.2717585275072656, "grad_norm": 4.083534102290873, "learning_rate": 8.544863191567566e-05, "loss": 1.015, "step": 5330 }, { "epoch": 0.2722683934125325, "grad_norm": 4.235388313648515, "learning_rate": 8.539035284039202e-05, "loss": 1.1118, "step": 5340 }, { "epoch": 0.2727782593177994, "grad_norm": 5.058878979715182, "learning_rate": 8.533197725320856e-05, "loss": 1.024, "step": 5350 }, { "epoch": 0.27328812522306634, "grad_norm": 4.051413812730975, "learning_rate": 8.527350531331948e-05, "loss": 1.0792, "step": 5360 }, { "epoch": 0.27379799112833325, "grad_norm": 4.2809206121762235, "learning_rate": 8.521493718018174e-05, "loss": 1.0242, "step": 5370 }, { "epoch": 0.27430785703360017, "grad_norm": 4.607774771425496, "learning_rate": 8.515627301351464e-05, "loss": 1.0761, "step": 5380 }, { "epoch": 0.2748177229388671, "grad_norm": 3.934021336258175, "learning_rate": 8.509751297329931e-05, "loss": 1.0023, "step": 5390 }, { "epoch": 0.275327588844134, "grad_norm": 4.539687565934454, "learning_rate": 8.503865721977842e-05, "loss": 0.989, "step": 5400 }, { "epoch": 0.2758374547494009, "grad_norm": 4.549502194307474, "learning_rate": 8.497970591345559e-05, "loss": 1.1012, "step": 5410 }, { "epoch": 0.2763473206546678, "grad_norm": 3.3418201574220414, "learning_rate": 8.492065921509506e-05, "loss": 1.0509, "step": 5420 }, { "epoch": 0.27685718655993474, "grad_norm": 3.44126776153577, "learning_rate": 8.486151728572117e-05, "loss": 0.9759, "step": 5430 }, { "epoch": 0.27736705246520166, "grad_norm": 4.7821638415619665, "learning_rate": 8.480228028661799e-05, "loss": 1.0115, "step": 5440 }, { "epoch": 0.2778769183704686, "grad_norm": 4.581162291071151, "learning_rate": 8.474294837932888e-05, "loss": 1.0219, "step": 5450 }, { "epoch": 0.2783867842757355, "grad_norm": 3.8733442150351505, "learning_rate": 8.468352172565594e-05, "loss": 1.0139, "step": 5460 }, { "epoch": 0.2788966501810024, "grad_norm": 2.8524393396533805, "learning_rate": 8.462400048765974e-05, "loss": 1.0764, "step": 5470 }, { "epoch": 0.2794065160862693, "grad_norm": 
3.914135068684852, "learning_rate": 8.456438482765871e-05, "loss": 1.0079, "step": 5480 }, { "epoch": 0.27991638199153623, "grad_norm": 6.970809052094916, "learning_rate": 8.450467490822885e-05, "loss": 1.0655, "step": 5490 }, { "epoch": 0.28042624789680315, "grad_norm": 4.533422666881598, "learning_rate": 8.444487089220311e-05, "loss": 1.0785, "step": 5500 }, { "epoch": 0.28093611380207006, "grad_norm": 4.364030587991842, "learning_rate": 8.438497294267117e-05, "loss": 1.0514, "step": 5510 }, { "epoch": 0.281445979707337, "grad_norm": 3.5954620791553076, "learning_rate": 8.432498122297878e-05, "loss": 1.1249, "step": 5520 }, { "epoch": 0.2819558456126039, "grad_norm": 5.088004201092307, "learning_rate": 8.426489589672746e-05, "loss": 1.1032, "step": 5530 }, { "epoch": 0.2824657115178708, "grad_norm": 4.350406613191002, "learning_rate": 8.420471712777397e-05, "loss": 1.0245, "step": 5540 }, { "epoch": 0.2829755774231377, "grad_norm": 3.7689968249233754, "learning_rate": 8.414444508022993e-05, "loss": 1.0566, "step": 5550 }, { "epoch": 0.28348544332840464, "grad_norm": 2.3206403906274478, "learning_rate": 8.408407991846128e-05, "loss": 0.9902, "step": 5560 }, { "epoch": 0.28399530923367156, "grad_norm": 3.2197793343063683, "learning_rate": 8.402362180708792e-05, "loss": 1.0255, "step": 5570 }, { "epoch": 0.28450517513893847, "grad_norm": 2.979548851656216, "learning_rate": 8.396307091098327e-05, "loss": 1.0217, "step": 5580 }, { "epoch": 0.2850150410442054, "grad_norm": 2.764591703548254, "learning_rate": 8.390242739527374e-05, "loss": 1.0413, "step": 5590 }, { "epoch": 0.2855249069494723, "grad_norm": 5.66720911548646, "learning_rate": 8.384169142533829e-05, "loss": 1.0558, "step": 5600 }, { "epoch": 0.2860347728547392, "grad_norm": 3.4636637664330054, "learning_rate": 8.378086316680805e-05, "loss": 1.0063, "step": 5610 }, { "epoch": 0.28654463876000613, "grad_norm": 4.561263426716149, "learning_rate": 8.371994278556585e-05, "loss": 1.0149, "step": 5620 }, { "epoch": 0.28705450466527305, "grad_norm": 3.6249633538048838, "learning_rate": 8.365893044774567e-05, "loss": 1.0217, "step": 5630 }, { "epoch": 0.28756437057053996, "grad_norm": 4.269152867615141, "learning_rate": 8.359782631973233e-05, "loss": 1.0127, "step": 5640 }, { "epoch": 0.2880742364758069, "grad_norm": 2.8866554626622665, "learning_rate": 8.353663056816094e-05, "loss": 0.9962, "step": 5650 }, { "epoch": 0.2885841023810738, "grad_norm": 6.427955292993714, "learning_rate": 8.347534335991649e-05, "loss": 1.0473, "step": 5660 }, { "epoch": 0.2890939682863407, "grad_norm": 3.645354303457835, "learning_rate": 8.341396486213336e-05, "loss": 1.0641, "step": 5670 }, { "epoch": 0.2896038341916076, "grad_norm": 2.9236763704046207, "learning_rate": 8.335249524219488e-05, "loss": 0.9943, "step": 5680 }, { "epoch": 0.29011370009687454, "grad_norm": 3.0417158427244733, "learning_rate": 8.329093466773288e-05, "loss": 1.0266, "step": 5690 }, { "epoch": 0.29062356600214145, "grad_norm": 4.798262050732901, "learning_rate": 8.322928330662725e-05, "loss": 1.087, "step": 5700 }, { "epoch": 0.29113343190740837, "grad_norm": 3.387281294186836, "learning_rate": 8.316754132700546e-05, "loss": 1.0454, "step": 5710 }, { "epoch": 0.2916432978126753, "grad_norm": 3.3192428360977217, "learning_rate": 8.310570889724204e-05, "loss": 1.1112, "step": 5720 }, { "epoch": 0.2921531637179422, "grad_norm": 4.941941616584977, "learning_rate": 8.304378618595828e-05, "loss": 1.018, "step": 5730 }, { "epoch": 0.2926630296232091, "grad_norm": 4.190731981597661, 
"learning_rate": 8.29817733620216e-05, "loss": 1.03, "step": 5740 }, { "epoch": 0.29317289552847603, "grad_norm": 3.880453031436918, "learning_rate": 8.29196705945452e-05, "loss": 1.0078, "step": 5750 }, { "epoch": 0.29368276143374294, "grad_norm": 3.4907793070686135, "learning_rate": 8.285747805288756e-05, "loss": 1.0948, "step": 5760 }, { "epoch": 0.29419262733900986, "grad_norm": 3.411510028798704, "learning_rate": 8.279519590665194e-05, "loss": 1.0575, "step": 5770 }, { "epoch": 0.2947024932442768, "grad_norm": 4.229858530996611, "learning_rate": 8.273282432568603e-05, "loss": 0.967, "step": 5780 }, { "epoch": 0.2952123591495437, "grad_norm": 6.096210130250228, "learning_rate": 8.267036348008135e-05, "loss": 1.0062, "step": 5790 }, { "epoch": 0.2957222250548106, "grad_norm": 6.587238238561264, "learning_rate": 8.260781354017288e-05, "loss": 1.084, "step": 5800 }, { "epoch": 0.2962320909600775, "grad_norm": 3.7764469559456213, "learning_rate": 8.254517467653858e-05, "loss": 1.004, "step": 5810 }, { "epoch": 0.29674195686534444, "grad_norm": 4.446471175619889, "learning_rate": 8.248244705999884e-05, "loss": 1.003, "step": 5820 }, { "epoch": 0.29725182277061135, "grad_norm": 2.8223993096422095, "learning_rate": 8.241963086161619e-05, "loss": 1.0444, "step": 5830 }, { "epoch": 0.29776168867587827, "grad_norm": 3.407486819057597, "learning_rate": 8.235672625269467e-05, "loss": 1.0348, "step": 5840 }, { "epoch": 0.2982715545811452, "grad_norm": 6.416390270421421, "learning_rate": 8.229373340477942e-05, "loss": 1.0075, "step": 5850 }, { "epoch": 0.2987814204864121, "grad_norm": 4.310043434326265, "learning_rate": 8.22306524896562e-05, "loss": 1.0525, "step": 5860 }, { "epoch": 0.299291286391679, "grad_norm": 4.328322807326321, "learning_rate": 8.216748367935098e-05, "loss": 1.054, "step": 5870 }, { "epoch": 0.2998011522969459, "grad_norm": 3.373534725731803, "learning_rate": 8.210422714612939e-05, "loss": 1.067, "step": 5880 }, { "epoch": 0.30031101820221284, "grad_norm": 3.9158391011605205, "learning_rate": 8.204088306249633e-05, "loss": 0.9902, "step": 5890 }, { "epoch": 0.30082088410747976, "grad_norm": 3.4756763441960103, "learning_rate": 8.19774516011954e-05, "loss": 0.9483, "step": 5900 }, { "epoch": 0.3013307500127467, "grad_norm": 4.287868146820267, "learning_rate": 8.191393293520851e-05, "loss": 1.0206, "step": 5910 }, { "epoch": 0.3018406159180136, "grad_norm": 2.7925722726812108, "learning_rate": 8.185032723775539e-05, "loss": 1.0463, "step": 5920 }, { "epoch": 0.3023504818232805, "grad_norm": 3.7782406839466924, "learning_rate": 8.178663468229308e-05, "loss": 1.0633, "step": 5930 }, { "epoch": 0.3028603477285474, "grad_norm": 3.4570275115591556, "learning_rate": 8.172285544251557e-05, "loss": 1.0559, "step": 5940 }, { "epoch": 0.30337021363381433, "grad_norm": 3.8757098152805125, "learning_rate": 8.165898969235313e-05, "loss": 1.0326, "step": 5950 }, { "epoch": 0.30388007953908125, "grad_norm": 3.734392643562509, "learning_rate": 8.159503760597203e-05, "loss": 1.0562, "step": 5960 }, { "epoch": 0.3043899454443481, "grad_norm": 4.139058419280422, "learning_rate": 8.153099935777394e-05, "loss": 1.0778, "step": 5970 }, { "epoch": 0.304899811349615, "grad_norm": 3.3481925980290437, "learning_rate": 8.146687512239555e-05, "loss": 1.0, "step": 5980 }, { "epoch": 0.30540967725488194, "grad_norm": 3.8821853086298796, "learning_rate": 8.140266507470797e-05, "loss": 1.0344, "step": 5990 }, { "epoch": 0.30591954316014885, "grad_norm": 3.0577541719194046, "learning_rate": 
8.133836938981642e-05, "loss": 1.0506, "step": 6000 }, { "epoch": 0.30642940906541577, "grad_norm": 4.378539580387895, "learning_rate": 8.127398824305959e-05, "loss": 1.0098, "step": 6010 }, { "epoch": 0.3069392749706827, "grad_norm": 3.05104160667796, "learning_rate": 8.120952181000922e-05, "loss": 1.0452, "step": 6020 }, { "epoch": 0.3074491408759496, "grad_norm": 2.9360691762300384, "learning_rate": 8.114497026646967e-05, "loss": 1.0462, "step": 6030 }, { "epoch": 0.3079590067812165, "grad_norm": 3.3758398442585187, "learning_rate": 8.108033378847741e-05, "loss": 1.0352, "step": 6040 }, { "epoch": 0.30846887268648343, "grad_norm": 3.7461045036368765, "learning_rate": 8.10156125523005e-05, "loss": 1.024, "step": 6050 }, { "epoch": 0.30897873859175035, "grad_norm": 4.283391682645258, "learning_rate": 8.095080673443817e-05, "loss": 1.0292, "step": 6060 }, { "epoch": 0.30948860449701726, "grad_norm": 4.432078710842605, "learning_rate": 8.088591651162027e-05, "loss": 0.9988, "step": 6070 }, { "epoch": 0.3099984704022842, "grad_norm": 3.3753821189210824, "learning_rate": 8.082094206080685e-05, "loss": 1.017, "step": 6080 }, { "epoch": 0.3105083363075511, "grad_norm": 3.384123032790279, "learning_rate": 8.075588355918767e-05, "loss": 1.0143, "step": 6090 }, { "epoch": 0.311018202212818, "grad_norm": 3.7123470517940844, "learning_rate": 8.06907411841817e-05, "loss": 0.9309, "step": 6100 }, { "epoch": 0.3115280681180849, "grad_norm": 5.4203110416183025, "learning_rate": 8.06255151134366e-05, "loss": 1.0198, "step": 6110 }, { "epoch": 0.31203793402335184, "grad_norm": 3.369729109830673, "learning_rate": 8.056020552482833e-05, "loss": 1.0045, "step": 6120 }, { "epoch": 0.31254779992861875, "grad_norm": 2.9044132742638076, "learning_rate": 8.049481259646057e-05, "loss": 1.0379, "step": 6130 }, { "epoch": 0.31305766583388567, "grad_norm": 3.6956344782317756, "learning_rate": 8.042933650666429e-05, "loss": 1.0147, "step": 6140 }, { "epoch": 0.3135675317391526, "grad_norm": 4.138698196151885, "learning_rate": 8.036377743399723e-05, "loss": 0.9836, "step": 6150 }, { "epoch": 0.3140773976444195, "grad_norm": 3.9726914998524134, "learning_rate": 8.029813555724343e-05, "loss": 1.0766, "step": 6160 }, { "epoch": 0.3145872635496864, "grad_norm": 3.2278452452583983, "learning_rate": 8.02324110554128e-05, "loss": 1.073, "step": 6170 }, { "epoch": 0.31509712945495333, "grad_norm": 4.036110424011256, "learning_rate": 8.016660410774048e-05, "loss": 1.0414, "step": 6180 }, { "epoch": 0.31560699536022024, "grad_norm": 3.4721676395767753, "learning_rate": 8.010071489368651e-05, "loss": 1.0064, "step": 6190 }, { "epoch": 0.31611686126548716, "grad_norm": 4.171481218349725, "learning_rate": 8.003474359293527e-05, "loss": 1.0408, "step": 6200 }, { "epoch": 0.3166267271707541, "grad_norm": 3.0393525201797007, "learning_rate": 7.996869038539497e-05, "loss": 0.9748, "step": 6210 }, { "epoch": 0.317136593076021, "grad_norm": 4.086813068218064, "learning_rate": 7.990255545119721e-05, "loss": 1.1157, "step": 6220 }, { "epoch": 0.3176464589812879, "grad_norm": 4.028125555211706, "learning_rate": 7.983633897069645e-05, "loss": 1.0142, "step": 6230 }, { "epoch": 0.3181563248865548, "grad_norm": 3.648022023047796, "learning_rate": 7.977004112446954e-05, "loss": 0.9725, "step": 6240 }, { "epoch": 0.31866619079182174, "grad_norm": 6.246011705858574, "learning_rate": 7.970366209331521e-05, "loss": 1.0264, "step": 6250 }, { "epoch": 0.31917605669708865, "grad_norm": 3.30169828484668, "learning_rate": 7.96372020582536e-05, "loss": 
0.9746, "step": 6260 }, { "epoch": 0.31968592260235557, "grad_norm": 5.6030520690217065, "learning_rate": 7.957066120052575e-05, "loss": 1.0492, "step": 6270 }, { "epoch": 0.3201957885076225, "grad_norm": 4.316399453033829, "learning_rate": 7.95040397015931e-05, "loss": 1.0394, "step": 6280 }, { "epoch": 0.3207056544128894, "grad_norm": 6.31392723555592, "learning_rate": 7.943733774313702e-05, "loss": 1.0226, "step": 6290 }, { "epoch": 0.3212155203181563, "grad_norm": 7.656997675995733, "learning_rate": 7.937055550705826e-05, "loss": 1.0446, "step": 6300 }, { "epoch": 0.3217253862234232, "grad_norm": 4.191946074391032, "learning_rate": 7.930369317547655e-05, "loss": 1.083, "step": 6310 }, { "epoch": 0.32223525212869014, "grad_norm": 3.3920913029142206, "learning_rate": 7.923675093073002e-05, "loss": 1.0026, "step": 6320 }, { "epoch": 0.32274511803395706, "grad_norm": 2.8751648166684634, "learning_rate": 7.916972895537471e-05, "loss": 0.9979, "step": 6330 }, { "epoch": 0.32325498393922397, "grad_norm": 8.186076568409813, "learning_rate": 7.91026274321841e-05, "loss": 1.0477, "step": 6340 }, { "epoch": 0.3237648498444909, "grad_norm": 3.970608260524862, "learning_rate": 7.903544654414863e-05, "loss": 1.0917, "step": 6350 }, { "epoch": 0.3242747157497578, "grad_norm": 3.9804963341833255, "learning_rate": 7.896818647447511e-05, "loss": 1.0293, "step": 6360 }, { "epoch": 0.3247845816550247, "grad_norm": 4.759285251414038, "learning_rate": 7.890084740658638e-05, "loss": 0.9541, "step": 6370 }, { "epoch": 0.32529444756029163, "grad_norm": 4.5870844287880645, "learning_rate": 7.883342952412065e-05, "loss": 1.0821, "step": 6380 }, { "epoch": 0.32580431346555855, "grad_norm": 3.954644094383673, "learning_rate": 7.876593301093104e-05, "loss": 1.0223, "step": 6390 }, { "epoch": 0.32631417937082546, "grad_norm": 3.3617478597581214, "learning_rate": 7.869835805108514e-05, "loss": 0.9586, "step": 6400 }, { "epoch": 0.3268240452760924, "grad_norm": 4.6543609804340775, "learning_rate": 7.86307048288645e-05, "loss": 0.9766, "step": 6410 }, { "epoch": 0.3273339111813593, "grad_norm": 3.418656450886225, "learning_rate": 7.8562973528764e-05, "loss": 1.0115, "step": 6420 }, { "epoch": 0.3278437770866262, "grad_norm": 2.553194693948127, "learning_rate": 7.849516433549157e-05, "loss": 1.0203, "step": 6430 }, { "epoch": 0.3283536429918931, "grad_norm": 5.1167976745938635, "learning_rate": 7.842727743396744e-05, "loss": 1.094, "step": 6440 }, { "epoch": 0.32886350889716004, "grad_norm": 3.0287183807221933, "learning_rate": 7.835931300932384e-05, "loss": 1.0347, "step": 6450 }, { "epoch": 0.32937337480242695, "grad_norm": 4.0945458903160405, "learning_rate": 7.829127124690435e-05, "loss": 1.0477, "step": 6460 }, { "epoch": 0.32988324070769387, "grad_norm": 5.405991749147465, "learning_rate": 7.822315233226352e-05, "loss": 1.0433, "step": 6470 }, { "epoch": 0.3303931066129608, "grad_norm": 2.6984366185386697, "learning_rate": 7.815495645116623e-05, "loss": 0.9836, "step": 6480 }, { "epoch": 0.3309029725182277, "grad_norm": 3.19451431480357, "learning_rate": 7.808668378958731e-05, "loss": 1.0136, "step": 6490 }, { "epoch": 0.3314128384234946, "grad_norm": 3.691317420027209, "learning_rate": 7.801833453371095e-05, "loss": 1.0022, "step": 6500 }, { "epoch": 0.33192270432876153, "grad_norm": 3.58253893542762, "learning_rate": 7.794990886993023e-05, "loss": 1.0686, "step": 6510 }, { "epoch": 0.33243257023402845, "grad_norm": 3.2270538104667543, "learning_rate": 7.788140698484656e-05, "loss": 1.0777, "step": 6520 }, { 
"epoch": 0.33294243613929536, "grad_norm": 3.825309738285254, "learning_rate": 7.781282906526926e-05, "loss": 1.0087, "step": 6530 }, { "epoch": 0.3334523020445623, "grad_norm": 3.431292200662794, "learning_rate": 7.774417529821498e-05, "loss": 1.0295, "step": 6540 }, { "epoch": 0.3339621679498292, "grad_norm": 3.4332600020944493, "learning_rate": 7.767544587090723e-05, "loss": 1.0361, "step": 6550 }, { "epoch": 0.3344720338550961, "grad_norm": 3.9414270543862218, "learning_rate": 7.76066409707758e-05, "loss": 0.9868, "step": 6560 }, { "epoch": 0.334981899760363, "grad_norm": 2.7595081933963903, "learning_rate": 7.753776078545636e-05, "loss": 1.034, "step": 6570 }, { "epoch": 0.33549176566562994, "grad_norm": 3.596539770858611, "learning_rate": 7.746880550278985e-05, "loss": 1.0214, "step": 6580 }, { "epoch": 0.33600163157089685, "grad_norm": 3.373828688577344, "learning_rate": 7.739977531082201e-05, "loss": 1.0053, "step": 6590 }, { "epoch": 0.33651149747616377, "grad_norm": 3.6170327929539425, "learning_rate": 7.733067039780288e-05, "loss": 0.955, "step": 6600 }, { "epoch": 0.3370213633814307, "grad_norm": 2.9437991532594703, "learning_rate": 7.726149095218627e-05, "loss": 0.9963, "step": 6610 }, { "epoch": 0.3375312292866976, "grad_norm": 4.1275030371627, "learning_rate": 7.719223716262922e-05, "loss": 1.0393, "step": 6620 }, { "epoch": 0.3380410951919645, "grad_norm": 3.0482612321416576, "learning_rate": 7.712290921799153e-05, "loss": 1.0777, "step": 6630 }, { "epoch": 0.33855096109723143, "grad_norm": 2.766889269862679, "learning_rate": 7.705350730733523e-05, "loss": 0.9314, "step": 6640 }, { "epoch": 0.33906082700249834, "grad_norm": 4.2316096098249245, "learning_rate": 7.698403161992403e-05, "loss": 0.9823, "step": 6650 }, { "epoch": 0.33957069290776526, "grad_norm": 4.5605608461768785, "learning_rate": 7.691448234522285e-05, "loss": 1.024, "step": 6660 }, { "epoch": 0.3400805588130322, "grad_norm": 3.7882617251352144, "learning_rate": 7.684485967289733e-05, "loss": 1.0745, "step": 6670 }, { "epoch": 0.3405904247182991, "grad_norm": 3.827009603055397, "learning_rate": 7.677516379281321e-05, "loss": 1.0147, "step": 6680 }, { "epoch": 0.341100290623566, "grad_norm": 3.58265864202473, "learning_rate": 7.67053948950359e-05, "loss": 1.0131, "step": 6690 }, { "epoch": 0.3416101565288329, "grad_norm": 2.819232340917403, "learning_rate": 7.663555316982994e-05, "loss": 1.0394, "step": 6700 }, { "epoch": 0.34212002243409984, "grad_norm": 5.162201471419937, "learning_rate": 7.656563880765846e-05, "loss": 1.0685, "step": 6710 }, { "epoch": 0.34262988833936675, "grad_norm": 4.604250829044841, "learning_rate": 7.649565199918268e-05, "loss": 1.0994, "step": 6720 }, { "epoch": 0.34313975424463367, "grad_norm": 2.5913293214568185, "learning_rate": 7.642559293526137e-05, "loss": 0.9885, "step": 6730 }, { "epoch": 0.3436496201499006, "grad_norm": 2.9520144073304877, "learning_rate": 7.635546180695038e-05, "loss": 0.9502, "step": 6740 }, { "epoch": 0.3441594860551675, "grad_norm": 3.279479685000254, "learning_rate": 7.628525880550209e-05, "loss": 0.9776, "step": 6750 }, { "epoch": 0.3446693519604344, "grad_norm": 3.3985425394454665, "learning_rate": 7.621498412236483e-05, "loss": 1.0407, "step": 6760 }, { "epoch": 0.3451792178657013, "grad_norm": 2.954435423041867, "learning_rate": 7.614463794918246e-05, "loss": 0.9834, "step": 6770 }, { "epoch": 0.34568908377096824, "grad_norm": 3.4446945006052565, "learning_rate": 7.607422047779375e-05, "loss": 1.0197, "step": 6780 }, { "epoch": 0.34619894967623516, 
"grad_norm": 3.6773551695050912, "learning_rate": 7.600373190023197e-05, "loss": 1.0115, "step": 6790 }, { "epoch": 0.3467088155815021, "grad_norm": 5.390145034760108, "learning_rate": 7.593317240872428e-05, "loss": 0.9882, "step": 6800 }, { "epoch": 0.347218681486769, "grad_norm": 3.018504473047834, "learning_rate": 7.586254219569115e-05, "loss": 0.9818, "step": 6810 }, { "epoch": 0.3477285473920359, "grad_norm": 3.094797158174787, "learning_rate": 7.579184145374604e-05, "loss": 1.0678, "step": 6820 }, { "epoch": 0.3482384132973028, "grad_norm": 3.6178317641775797, "learning_rate": 7.572107037569464e-05, "loss": 0.996, "step": 6830 }, { "epoch": 0.34874827920256973, "grad_norm": 2.965198313478229, "learning_rate": 7.56502291545345e-05, "loss": 1.0568, "step": 6840 }, { "epoch": 0.34925814510783665, "grad_norm": 2.932751731657846, "learning_rate": 7.557931798345448e-05, "loss": 0.9739, "step": 6850 }, { "epoch": 0.34976801101310356, "grad_norm": 6.410663376523632, "learning_rate": 7.550833705583414e-05, "loss": 0.9892, "step": 6860 }, { "epoch": 0.3502778769183705, "grad_norm": 4.132873483531455, "learning_rate": 7.54372865652433e-05, "loss": 0.9613, "step": 6870 }, { "epoch": 0.3507877428236374, "grad_norm": 4.817541784367713, "learning_rate": 7.53661667054415e-05, "loss": 0.9519, "step": 6880 }, { "epoch": 0.3512976087289043, "grad_norm": 2.6834832080268987, "learning_rate": 7.529497767037741e-05, "loss": 0.9636, "step": 6890 }, { "epoch": 0.3518074746341712, "grad_norm": 4.138092874291924, "learning_rate": 7.52237196541884e-05, "loss": 1.055, "step": 6900 }, { "epoch": 0.35231734053943814, "grad_norm": 3.125727623443946, "learning_rate": 7.515239285119988e-05, "loss": 0.9689, "step": 6910 }, { "epoch": 0.35282720644470505, "grad_norm": 4.2695157888907715, "learning_rate": 7.508099745592496e-05, "loss": 0.9804, "step": 6920 }, { "epoch": 0.35333707234997197, "grad_norm": 3.089823322347325, "learning_rate": 7.500953366306369e-05, "loss": 1.0082, "step": 6930 }, { "epoch": 0.3538469382552389, "grad_norm": 3.488758675288725, "learning_rate": 7.493800166750273e-05, "loss": 0.9979, "step": 6940 }, { "epoch": 0.3543568041605058, "grad_norm": 5.360527026709568, "learning_rate": 7.486640166431467e-05, "loss": 1.1085, "step": 6950 }, { "epoch": 0.3548666700657727, "grad_norm": 3.571941990726853, "learning_rate": 7.479473384875759e-05, "loss": 0.9516, "step": 6960 }, { "epoch": 0.35537653597103963, "grad_norm": 4.670046148030324, "learning_rate": 7.472299841627451e-05, "loss": 0.9945, "step": 6970 }, { "epoch": 0.35588640187630655, "grad_norm": 3.8655870091709836, "learning_rate": 7.465119556249285e-05, "loss": 0.9686, "step": 6980 }, { "epoch": 0.35639626778157346, "grad_norm": 4.21089436280745, "learning_rate": 7.457932548322383e-05, "loss": 0.9965, "step": 6990 }, { "epoch": 0.3569061336868404, "grad_norm": 4.328581391824839, "learning_rate": 7.450738837446212e-05, "loss": 0.9494, "step": 7000 }, { "epoch": 0.3574159995921073, "grad_norm": 3.1531910468329905, "learning_rate": 7.443538443238504e-05, "loss": 0.9227, "step": 7010 }, { "epoch": 0.3579258654973742, "grad_norm": 4.081079875781598, "learning_rate": 7.436331385335226e-05, "loss": 1.0305, "step": 7020 }, { "epoch": 0.3584357314026411, "grad_norm": 3.596808195506411, "learning_rate": 7.429117683390516e-05, "loss": 1.0313, "step": 7030 }, { "epoch": 0.35894559730790804, "grad_norm": 3.040454003052074, "learning_rate": 7.421897357076628e-05, "loss": 0.9737, "step": 7040 }, { "epoch": 0.35945546321317495, "grad_norm": 3.475180199633449, 
"learning_rate": 7.414670426083887e-05, "loss": 1.1099, "step": 7050 }, { "epoch": 0.35996532911844187, "grad_norm": 3.5186146669369354, "learning_rate": 7.40743691012062e-05, "loss": 1.0318, "step": 7060 }, { "epoch": 0.3604751950237088, "grad_norm": 3.6010935658896837, "learning_rate": 7.400196828913123e-05, "loss": 1.0393, "step": 7070 }, { "epoch": 0.3609850609289757, "grad_norm": 3.964601718404231, "learning_rate": 7.392950202205586e-05, "loss": 0.9936, "step": 7080 }, { "epoch": 0.3614949268342426, "grad_norm": 3.287468202523833, "learning_rate": 7.385697049760054e-05, "loss": 0.9642, "step": 7090 }, { "epoch": 0.36200479273950953, "grad_norm": 5.000572627473819, "learning_rate": 7.378437391356367e-05, "loss": 0.9789, "step": 7100 }, { "epoch": 0.36251465864477644, "grad_norm": 4.145564403281979, "learning_rate": 7.37117124679211e-05, "loss": 1.01, "step": 7110 }, { "epoch": 0.36302452455004336, "grad_norm": 5.746731477878051, "learning_rate": 7.36389863588255e-05, "loss": 0.951, "step": 7120 }, { "epoch": 0.3635343904553103, "grad_norm": 3.5135881735177934, "learning_rate": 7.356619578460593e-05, "loss": 1.0421, "step": 7130 }, { "epoch": 0.3640442563605772, "grad_norm": 3.6815592879194092, "learning_rate": 7.349334094376723e-05, "loss": 1.0099, "step": 7140 }, { "epoch": 0.3645541222658441, "grad_norm": 4.00363004751103, "learning_rate": 7.342042203498951e-05, "loss": 0.9899, "step": 7150 }, { "epoch": 0.365063988171111, "grad_norm": 2.6832488579144758, "learning_rate": 7.334743925712762e-05, "loss": 0.983, "step": 7160 }, { "epoch": 0.36557385407637794, "grad_norm": 3.841794389998567, "learning_rate": 7.327439280921051e-05, "loss": 0.9575, "step": 7170 }, { "epoch": 0.36608371998164485, "grad_norm": 3.29136888447932, "learning_rate": 7.320128289044086e-05, "loss": 0.962, "step": 7180 }, { "epoch": 0.36659358588691177, "grad_norm": 2.4127413229065793, "learning_rate": 7.312810970019439e-05, "loss": 0.9729, "step": 7190 }, { "epoch": 0.3671034517921787, "grad_norm": 3.05054193268961, "learning_rate": 7.305487343801933e-05, "loss": 0.9759, "step": 7200 }, { "epoch": 0.3676133176974456, "grad_norm": 3.4846575062743717, "learning_rate": 7.298157430363596e-05, "loss": 0.9274, "step": 7210 }, { "epoch": 0.3681231836027125, "grad_norm": 3.2196195502113105, "learning_rate": 7.290821249693605e-05, "loss": 1.0103, "step": 7220 }, { "epoch": 0.3686330495079794, "grad_norm": 2.9843784506611004, "learning_rate": 7.283478821798219e-05, "loss": 0.9995, "step": 7230 }, { "epoch": 0.36914291541324634, "grad_norm": 5.753179910368323, "learning_rate": 7.27613016670074e-05, "loss": 1.0065, "step": 7240 }, { "epoch": 0.36965278131851326, "grad_norm": 4.367496528660646, "learning_rate": 7.268775304441451e-05, "loss": 1.0498, "step": 7250 }, { "epoch": 0.3701626472237802, "grad_norm": 4.256618840977742, "learning_rate": 7.26141425507756e-05, "loss": 1.0123, "step": 7260 }, { "epoch": 0.3706725131290471, "grad_norm": 3.928964763724638, "learning_rate": 7.25404703868315e-05, "loss": 0.9618, "step": 7270 }, { "epoch": 0.371182379034314, "grad_norm": 2.824065385221502, "learning_rate": 7.246673675349123e-05, "loss": 1.0308, "step": 7280 }, { "epoch": 0.37169224493958086, "grad_norm": 2.698890858874962, "learning_rate": 7.239294185183141e-05, "loss": 0.9943, "step": 7290 }, { "epoch": 0.3722021108448478, "grad_norm": 3.0964082743027794, "learning_rate": 7.231908588309576e-05, "loss": 0.9711, "step": 7300 }, { "epoch": 0.3727119767501147, "grad_norm": 3.927548468490229, "learning_rate": 7.224516904869453e-05, 
"loss": 0.9781, "step": 7310 }, { "epoch": 0.3732218426553816, "grad_norm": 3.4194993221090293, "learning_rate": 7.217119155020396e-05, "loss": 0.9691, "step": 7320 }, { "epoch": 0.3737317085606485, "grad_norm": 2.56858575281676, "learning_rate": 7.20971535893657e-05, "loss": 0.951, "step": 7330 }, { "epoch": 0.37424157446591544, "grad_norm": 5.285451925259465, "learning_rate": 7.202305536808633e-05, "loss": 1.0044, "step": 7340 }, { "epoch": 0.37475144037118235, "grad_norm": 3.029951338784045, "learning_rate": 7.194889708843673e-05, "loss": 0.9914, "step": 7350 }, { "epoch": 0.37526130627644927, "grad_norm": 5.020696791673093, "learning_rate": 7.18746789526516e-05, "loss": 0.9958, "step": 7360 }, { "epoch": 0.3757711721817162, "grad_norm": 4.566688407768787, "learning_rate": 7.180040116312881e-05, "loss": 1.0929, "step": 7370 }, { "epoch": 0.3762810380869831, "grad_norm": 3.872358773127792, "learning_rate": 7.172606392242894e-05, "loss": 0.9853, "step": 7380 }, { "epoch": 0.37679090399225, "grad_norm": 3.4473918049417667, "learning_rate": 7.165166743327473e-05, "loss": 1.0171, "step": 7390 }, { "epoch": 0.37730076989751693, "grad_norm": 4.071097703939284, "learning_rate": 7.157721189855047e-05, "loss": 0.95, "step": 7400 }, { "epoch": 0.37781063580278385, "grad_norm": 4.738454803206081, "learning_rate": 7.150269752130144e-05, "loss": 1.0548, "step": 7410 }, { "epoch": 0.37832050170805076, "grad_norm": 3.211095977619034, "learning_rate": 7.142812450473344e-05, "loss": 1.0732, "step": 7420 }, { "epoch": 0.3788303676133177, "grad_norm": 2.408010202469844, "learning_rate": 7.135349305221216e-05, "loss": 1.055, "step": 7430 }, { "epoch": 0.3793402335185846, "grad_norm": 3.738509463429098, "learning_rate": 7.127880336726262e-05, "loss": 1.0725, "step": 7440 }, { "epoch": 0.3798500994238515, "grad_norm": 3.6802881504337472, "learning_rate": 7.120405565356871e-05, "loss": 1.0432, "step": 7450 }, { "epoch": 0.3803599653291184, "grad_norm": 6.50944828265682, "learning_rate": 7.11292501149725e-05, "loss": 1.0184, "step": 7460 }, { "epoch": 0.38086983123438534, "grad_norm": 2.8461494658896944, "learning_rate": 7.105438695547381e-05, "loss": 0.9833, "step": 7470 }, { "epoch": 0.38137969713965225, "grad_norm": 2.8845637706342444, "learning_rate": 7.097946637922955e-05, "loss": 0.926, "step": 7480 }, { "epoch": 0.38188956304491917, "grad_norm": 4.966386983635893, "learning_rate": 7.090448859055321e-05, "loss": 1.0482, "step": 7490 }, { "epoch": 0.3823994289501861, "grad_norm": 3.8734273182198353, "learning_rate": 7.082945379391436e-05, "loss": 0.9855, "step": 7500 }, { "epoch": 0.382909294855453, "grad_norm": 7.2654653468941, "learning_rate": 7.075436219393798e-05, "loss": 1.0067, "step": 7510 }, { "epoch": 0.3834191607607199, "grad_norm": 2.6321094110314145, "learning_rate": 7.067921399540395e-05, "loss": 0.9443, "step": 7520 }, { "epoch": 0.38392902666598683, "grad_norm": 3.772431198509487, "learning_rate": 7.060400940324655e-05, "loss": 1.0275, "step": 7530 }, { "epoch": 0.38443889257125374, "grad_norm": 2.864068961873481, "learning_rate": 7.052874862255381e-05, "loss": 0.9, "step": 7540 }, { "epoch": 0.38494875847652066, "grad_norm": 3.244954947277513, "learning_rate": 7.045343185856701e-05, "loss": 1.0231, "step": 7550 }, { "epoch": 0.3854586243817876, "grad_norm": 2.9501480598286625, "learning_rate": 7.037805931668005e-05, "loss": 1.0132, "step": 7560 }, { "epoch": 0.3859684902870545, "grad_norm": 4.033800846554501, "learning_rate": 7.030263120243902e-05, "loss": 0.9507, "step": 7570 }, { "epoch": 
0.3864783561923214, "grad_norm": 4.935920805568241, "learning_rate": 7.02271477215415e-05, "loss": 1.0124, "step": 7580 }, { "epoch": 0.3869882220975883, "grad_norm": 2.9909597802655394, "learning_rate": 7.015160907983609e-05, "loss": 1.08, "step": 7590 }, { "epoch": 0.38749808800285523, "grad_norm": 3.0885134763230826, "learning_rate": 7.007601548332179e-05, "loss": 0.9899, "step": 7600 }, { "epoch": 0.38800795390812215, "grad_norm": 4.085304060769854, "learning_rate": 7.000036713814749e-05, "loss": 1.0568, "step": 7610 }, { "epoch": 0.38851781981338906, "grad_norm": 2.8903100587581356, "learning_rate": 6.992466425061137e-05, "loss": 0.9689, "step": 7620 }, { "epoch": 0.389027685718656, "grad_norm": 3.4937069513104286, "learning_rate": 6.984890702716036e-05, "loss": 1.0279, "step": 7630 }, { "epoch": 0.3895375516239229, "grad_norm": 2.5446672367985266, "learning_rate": 6.977309567438954e-05, "loss": 0.9681, "step": 7640 }, { "epoch": 0.3900474175291898, "grad_norm": 3.2734287563527222, "learning_rate": 6.969723039904166e-05, "loss": 0.9758, "step": 7650 }, { "epoch": 0.3905572834344567, "grad_norm": 4.759649973235408, "learning_rate": 6.962131140800647e-05, "loss": 0.9751, "step": 7660 }, { "epoch": 0.39106714933972364, "grad_norm": 3.5749996959546926, "learning_rate": 6.954533890832023e-05, "loss": 0.9652, "step": 7670 }, { "epoch": 0.39157701524499056, "grad_norm": 4.91677035555623, "learning_rate": 6.94693131071651e-05, "loss": 0.9514, "step": 7680 }, { "epoch": 0.39208688115025747, "grad_norm": 3.155742449847258, "learning_rate": 6.939323421186861e-05, "loss": 0.9955, "step": 7690 }, { "epoch": 0.3925967470555244, "grad_norm": 5.310305673644114, "learning_rate": 6.931710242990312e-05, "loss": 0.8624, "step": 7700 }, { "epoch": 0.3931066129607913, "grad_norm": 3.5806355390430546, "learning_rate": 6.924091796888512e-05, "loss": 0.9596, "step": 7710 }, { "epoch": 0.3936164788660582, "grad_norm": 2.561795181295556, "learning_rate": 6.916468103657489e-05, "loss": 0.9755, "step": 7720 }, { "epoch": 0.39412634477132513, "grad_norm": 2.64332129127512, "learning_rate": 6.908839184087566e-05, "loss": 0.972, "step": 7730 }, { "epoch": 0.39463621067659205, "grad_norm": 3.78250238500619, "learning_rate": 6.901205058983332e-05, "loss": 1.0044, "step": 7740 }, { "epoch": 0.39514607658185896, "grad_norm": 5.87662489302249, "learning_rate": 6.89356574916356e-05, "loss": 1.0159, "step": 7750 }, { "epoch": 0.3956559424871259, "grad_norm": 3.9235098033032223, "learning_rate": 6.885921275461168e-05, "loss": 1.0609, "step": 7760 }, { "epoch": 0.3961658083923928, "grad_norm": 2.6444250632626147, "learning_rate": 6.878271658723162e-05, "loss": 0.9404, "step": 7770 }, { "epoch": 0.3966756742976597, "grad_norm": 4.425412693097132, "learning_rate": 6.870616919810562e-05, "loss": 1.0248, "step": 7780 }, { "epoch": 0.3971855402029266, "grad_norm": 5.1067176297252095, "learning_rate": 6.862957079598362e-05, "loss": 0.9419, "step": 7790 }, { "epoch": 0.39769540610819354, "grad_norm": 5.424418090130069, "learning_rate": 6.855292158975468e-05, "loss": 0.979, "step": 7800 }, { "epoch": 0.39820527201346045, "grad_norm": 4.041976527019181, "learning_rate": 6.84762217884464e-05, "loss": 1.0984, "step": 7810 }, { "epoch": 0.39871513791872737, "grad_norm": 3.0713325053672857, "learning_rate": 6.839947160122436e-05, "loss": 0.9948, "step": 7820 }, { "epoch": 0.3992250038239943, "grad_norm": 5.283031713528076, "learning_rate": 6.832267123739154e-05, "loss": 1.0389, "step": 7830 }, { "epoch": 0.3997348697292612, "grad_norm": 
3.405740106561875, "learning_rate": 6.824582090638777e-05, "loss": 0.9844, "step": 7840 }, { "epoch": 0.4002447356345281, "grad_norm": 4.05474377505947, "learning_rate": 6.81689208177891e-05, "loss": 1.0185, "step": 7850 }, { "epoch": 0.40075460153979503, "grad_norm": 4.218173684188458, "learning_rate": 6.809197118130734e-05, "loss": 1.0217, "step": 7860 }, { "epoch": 0.40126446744506195, "grad_norm": 5.375351162624322, "learning_rate": 6.801497220678935e-05, "loss": 1.0078, "step": 7870 }, { "epoch": 0.40177433335032886, "grad_norm": 2.3886255403553047, "learning_rate": 6.793792410421658e-05, "loss": 0.9489, "step": 7880 }, { "epoch": 0.4022841992555958, "grad_norm": 4.616915049037168, "learning_rate": 6.786082708370447e-05, "loss": 1.0114, "step": 7890 }, { "epoch": 0.4027940651608627, "grad_norm": 3.9455847324621143, "learning_rate": 6.778368135550182e-05, "loss": 0.974, "step": 7900 }, { "epoch": 0.4033039310661296, "grad_norm": 4.82991485201106, "learning_rate": 6.770648712999025e-05, "loss": 0.9652, "step": 7910 }, { "epoch": 0.4038137969713965, "grad_norm": 2.9101742427326, "learning_rate": 6.762924461768375e-05, "loss": 0.953, "step": 7920 }, { "epoch": 0.40432366287666344, "grad_norm": 3.3596524918735917, "learning_rate": 6.755195402922781e-05, "loss": 0.9509, "step": 7930 }, { "epoch": 0.40483352878193035, "grad_norm": 4.225953624249036, "learning_rate": 6.747461557539918e-05, "loss": 0.9973, "step": 7940 }, { "epoch": 0.40534339468719727, "grad_norm": 3.5903876664997485, "learning_rate": 6.739722946710507e-05, "loss": 0.9357, "step": 7950 }, { "epoch": 0.4058532605924642, "grad_norm": 5.3606943141476195, "learning_rate": 6.731979591538267e-05, "loss": 1.0182, "step": 7960 }, { "epoch": 0.4063631264977311, "grad_norm": 3.2153408156011287, "learning_rate": 6.724231513139852e-05, "loss": 1.0024, "step": 7970 }, { "epoch": 0.406872992402998, "grad_norm": 3.292294552670899, "learning_rate": 6.716478732644802e-05, "loss": 0.9274, "step": 7980 }, { "epoch": 0.40738285830826493, "grad_norm": 2.7317030544391163, "learning_rate": 6.708721271195476e-05, "loss": 0.964, "step": 7990 }, { "epoch": 0.40789272421353184, "grad_norm": 4.092024090983047, "learning_rate": 6.700959149946996e-05, "loss": 1.0025, "step": 8000 }, { "epoch": 0.40840259011879876, "grad_norm": 2.788397450491997, "learning_rate": 6.693192390067199e-05, "loss": 0.9709, "step": 8010 }, { "epoch": 0.4089124560240657, "grad_norm": 3.4119060630259117, "learning_rate": 6.685421012736563e-05, "loss": 0.9564, "step": 8020 }, { "epoch": 0.4094223219293326, "grad_norm": 2.9489807050382186, "learning_rate": 6.677645039148168e-05, "loss": 0.9713, "step": 8030 }, { "epoch": 0.4099321878345995, "grad_norm": 2.817060206884971, "learning_rate": 6.66986449050762e-05, "loss": 0.9081, "step": 8040 }, { "epoch": 0.4104420537398664, "grad_norm": 3.780284199420438, "learning_rate": 6.662079388033004e-05, "loss": 1.0138, "step": 8050 }, { "epoch": 0.41095191964513333, "grad_norm": 3.5269807271137155, "learning_rate": 6.654289752954826e-05, "loss": 1.0603, "step": 8060 }, { "epoch": 0.41146178555040025, "grad_norm": 3.135683553089666, "learning_rate": 6.646495606515949e-05, "loss": 0.9733, "step": 8070 }, { "epoch": 0.41197165145566716, "grad_norm": 2.7791201230184592, "learning_rate": 6.638696969971542e-05, "loss": 0.9089, "step": 8080 }, { "epoch": 0.4124815173609341, "grad_norm": 2.404636007860764, "learning_rate": 6.630893864589016e-05, "loss": 0.9515, "step": 8090 }, { "epoch": 0.412991383266201, "grad_norm": 3.9092946580394634, 
"learning_rate": 6.62308631164797e-05, "loss": 1.0245, "step": 8100 }, { "epoch": 0.4135012491714679, "grad_norm": 2.8819181887521177, "learning_rate": 6.615274332440134e-05, "loss": 0.9682, "step": 8110 }, { "epoch": 0.4140111150767348, "grad_norm": 4.201890582364877, "learning_rate": 6.607457948269305e-05, "loss": 0.9545, "step": 8120 }, { "epoch": 0.41452098098200174, "grad_norm": 3.0616475395519043, "learning_rate": 6.599637180451294e-05, "loss": 1.001, "step": 8130 }, { "epoch": 0.41503084688726866, "grad_norm": 6.135332887534604, "learning_rate": 6.59181205031387e-05, "loss": 1.0417, "step": 8140 }, { "epoch": 0.41554071279253557, "grad_norm": 3.3915570825360426, "learning_rate": 6.583982579196693e-05, "loss": 0.9526, "step": 8150 }, { "epoch": 0.4160505786978025, "grad_norm": 5.130771096453491, "learning_rate": 6.576148788451264e-05, "loss": 0.9996, "step": 8160 }, { "epoch": 0.4165604446030694, "grad_norm": 4.225003471130253, "learning_rate": 6.568310699440861e-05, "loss": 1.0112, "step": 8170 }, { "epoch": 0.4170703105083363, "grad_norm": 4.195416717849394, "learning_rate": 6.56046833354049e-05, "loss": 1.022, "step": 8180 }, { "epoch": 0.41758017641360323, "grad_norm": 4.349548622404095, "learning_rate": 6.552621712136812e-05, "loss": 1.0196, "step": 8190 }, { "epoch": 0.41809004231887015, "grad_norm": 5.608295368338538, "learning_rate": 6.544770856628099e-05, "loss": 0.9041, "step": 8200 }, { "epoch": 0.41859990822413706, "grad_norm": 2.464411009934411, "learning_rate": 6.536915788424171e-05, "loss": 0.9406, "step": 8210 }, { "epoch": 0.419109774129404, "grad_norm": 3.1139854599901136, "learning_rate": 6.529056528946329e-05, "loss": 0.979, "step": 8220 }, { "epoch": 0.4196196400346709, "grad_norm": 4.065458780063512, "learning_rate": 6.521193099627311e-05, "loss": 0.991, "step": 8230 }, { "epoch": 0.4201295059399378, "grad_norm": 2.774278033784981, "learning_rate": 6.513325521911223e-05, "loss": 0.9665, "step": 8240 }, { "epoch": 0.4206393718452047, "grad_norm": 3.723337247910771, "learning_rate": 6.505453817253483e-05, "loss": 1.0147, "step": 8250 }, { "epoch": 0.42114923775047164, "grad_norm": 3.0144132742908103, "learning_rate": 6.49757800712077e-05, "loss": 0.9086, "step": 8260 }, { "epoch": 0.42165910365573855, "grad_norm": 3.76467661836284, "learning_rate": 6.489698112990949e-05, "loss": 0.9483, "step": 8270 }, { "epoch": 0.42216896956100547, "grad_norm": 5.533722695705143, "learning_rate": 6.481814156353028e-05, "loss": 0.9709, "step": 8280 }, { "epoch": 0.4226788354662724, "grad_norm": 4.402153778706855, "learning_rate": 6.473926158707095e-05, "loss": 0.9836, "step": 8290 }, { "epoch": 0.4231887013715393, "grad_norm": 3.7297200027269692, "learning_rate": 6.466034141564256e-05, "loss": 0.9816, "step": 8300 }, { "epoch": 0.4236985672768062, "grad_norm": 2.9872321174303638, "learning_rate": 6.458138126446578e-05, "loss": 0.9566, "step": 8310 }, { "epoch": 0.42420843318207313, "grad_norm": 4.776691500929038, "learning_rate": 6.45023813488703e-05, "loss": 1.003, "step": 8320 }, { "epoch": 0.42471829908734005, "grad_norm": 3.590468781706842, "learning_rate": 6.442334188429429e-05, "loss": 0.9772, "step": 8330 }, { "epoch": 0.42522816499260696, "grad_norm": 3.8608403740136468, "learning_rate": 6.434426308628373e-05, "loss": 0.9319, "step": 8340 }, { "epoch": 0.4257380308978739, "grad_norm": 4.444880659868321, "learning_rate": 6.426514517049189e-05, "loss": 1.0666, "step": 8350 }, { "epoch": 0.4262478968031408, "grad_norm": 3.9726356268299745, "learning_rate": 
6.418598835267872e-05, "loss": 1.0054, "step": 8360 }, { "epoch": 0.4267577627084077, "grad_norm": 3.0163954750719095, "learning_rate": 6.41067928487102e-05, "loss": 1.0377, "step": 8370 }, { "epoch": 0.4272676286136746, "grad_norm": 2.535923278426502, "learning_rate": 6.402755887455792e-05, "loss": 1.0057, "step": 8380 }, { "epoch": 0.42777749451894154, "grad_norm": 3.3930949309239304, "learning_rate": 6.394828664629828e-05, "loss": 0.8832, "step": 8390 }, { "epoch": 0.42828736042420845, "grad_norm": 3.059493768433064, "learning_rate": 6.386897638011206e-05, "loss": 1.0194, "step": 8400 }, { "epoch": 0.42879722632947537, "grad_norm": 2.5297070537029023, "learning_rate": 6.378962829228371e-05, "loss": 0.989, "step": 8410 }, { "epoch": 0.4293070922347423, "grad_norm": 3.9679756011783525, "learning_rate": 6.371024259920091e-05, "loss": 1.0448, "step": 8420 }, { "epoch": 0.4298169581400092, "grad_norm": 2.661923982062292, "learning_rate": 6.363081951735384e-05, "loss": 1.0615, "step": 8430 }, { "epoch": 0.4303268240452761, "grad_norm": 3.435589450600634, "learning_rate": 6.35513592633346e-05, "loss": 1.0189, "step": 8440 }, { "epoch": 0.43083668995054303, "grad_norm": 2.9961841597196672, "learning_rate": 6.347186205383678e-05, "loss": 1.0101, "step": 8450 }, { "epoch": 0.43134655585580994, "grad_norm": 3.1285804783723883, "learning_rate": 6.339232810565463e-05, "loss": 0.9586, "step": 8460 }, { "epoch": 0.43185642176107686, "grad_norm": 4.510792797789835, "learning_rate": 6.331275763568266e-05, "loss": 0.9259, "step": 8470 }, { "epoch": 0.4323662876663438, "grad_norm": 4.204311113244711, "learning_rate": 6.323315086091494e-05, "loss": 1.0109, "step": 8480 }, { "epoch": 0.4328761535716107, "grad_norm": 3.287802283355501, "learning_rate": 6.315350799844455e-05, "loss": 0.9761, "step": 8490 }, { "epoch": 0.4333860194768776, "grad_norm": 5.124636269441518, "learning_rate": 6.307382926546303e-05, "loss": 1.0093, "step": 8500 }, { "epoch": 0.4338958853821445, "grad_norm": 3.224727245415259, "learning_rate": 6.299411487925967e-05, "loss": 0.9701, "step": 8510 }, { "epoch": 0.43440575128741143, "grad_norm": 4.10442689099236, "learning_rate": 6.291436505722105e-05, "loss": 0.9962, "step": 8520 }, { "epoch": 0.43491561719267835, "grad_norm": 3.134233230716742, "learning_rate": 6.283458001683033e-05, "loss": 0.9685, "step": 8530 }, { "epoch": 0.43542548309794527, "grad_norm": 4.416029020529532, "learning_rate": 6.275475997566679e-05, "loss": 0.9539, "step": 8540 }, { "epoch": 0.4359353490032122, "grad_norm": 3.2852254179593254, "learning_rate": 6.267490515140506e-05, "loss": 1.0486, "step": 8550 }, { "epoch": 0.4364452149084791, "grad_norm": 3.814048589694972, "learning_rate": 6.259501576181471e-05, "loss": 0.9066, "step": 8560 }, { "epoch": 0.436955080813746, "grad_norm": 3.198841271697382, "learning_rate": 6.251509202475955e-05, "loss": 0.9519, "step": 8570 }, { "epoch": 0.4374649467190129, "grad_norm": 3.647237279022868, "learning_rate": 6.243513415819701e-05, "loss": 0.9508, "step": 8580 }, { "epoch": 0.43797481262427984, "grad_norm": 3.0249604860517407, "learning_rate": 6.235514238017767e-05, "loss": 0.9915, "step": 8590 }, { "epoch": 0.43848467852954676, "grad_norm": 4.31484540170633, "learning_rate": 6.227511690884454e-05, "loss": 0.9684, "step": 8600 }, { "epoch": 0.43899454443481367, "grad_norm": 2.594466421786102, "learning_rate": 6.219505796243248e-05, "loss": 0.9741, "step": 8610 }, { "epoch": 0.43950441034008053, "grad_norm": 3.7573732890106033, "learning_rate": 6.211496575926775e-05,
"loss": 1.0002, "step": 8620 }, { "epoch": 0.44001427624534745, "grad_norm": 2.9838071187088744, "learning_rate": 6.203484051776721e-05, "loss": 0.9893, "step": 8630 }, { "epoch": 0.44052414215061436, "grad_norm": 3.1365011892344676, "learning_rate": 6.195468245643783e-05, "loss": 0.9622, "step": 8640 }, { "epoch": 0.4410340080558813, "grad_norm": 2.7563754695966582, "learning_rate": 6.187449179387613e-05, "loss": 0.9331, "step": 8650 }, { "epoch": 0.4415438739611482, "grad_norm": 2.7666951720052, "learning_rate": 6.179426874876746e-05, "loss": 0.9478, "step": 8660 }, { "epoch": 0.4420537398664151, "grad_norm": 2.920138964629528, "learning_rate": 6.171401353988553e-05, "loss": 0.9235, "step": 8670 }, { "epoch": 0.442563605771682, "grad_norm": 2.967699108126368, "learning_rate": 6.163372638609179e-05, "loss": 0.9343, "step": 8680 }, { "epoch": 0.44307347167694894, "grad_norm": 2.7526313148316595, "learning_rate": 6.155340750633476e-05, "loss": 0.9425, "step": 8690 }, { "epoch": 0.44358333758221585, "grad_norm": 4.045087410164233, "learning_rate": 6.147305711964946e-05, "loss": 0.9273, "step": 8700 }, { "epoch": 0.44409320348748277, "grad_norm": 2.670856320943986, "learning_rate": 6.139267544515689e-05, "loss": 0.9904, "step": 8710 }, { "epoch": 0.4446030693927497, "grad_norm": 3.9351489580031873, "learning_rate": 6.131226270206332e-05, "loss": 0.9996, "step": 8720 }, { "epoch": 0.4451129352980166, "grad_norm": 5.942247351038859, "learning_rate": 6.123181910965979e-05, "loss": 0.9493, "step": 8730 }, { "epoch": 0.4456228012032835, "grad_norm": 2.79740791815727, "learning_rate": 6.115134488732143e-05, "loss": 0.9022, "step": 8740 }, { "epoch": 0.44613266710855043, "grad_norm": 3.4550178446015143, "learning_rate": 6.107084025450693e-05, "loss": 1.0, "step": 8750 }, { "epoch": 0.44664253301381734, "grad_norm": 2.5156734942081553, "learning_rate": 6.099030543075792e-05, "loss": 0.9552, "step": 8760 }, { "epoch": 0.44715239891908426, "grad_norm": 3.421439146165748, "learning_rate": 6.090974063569832e-05, "loss": 0.9912, "step": 8770 }, { "epoch": 0.4476622648243512, "grad_norm": 2.8395317501384714, "learning_rate": 6.082914608903382e-05, "loss": 1.0194, "step": 8780 }, { "epoch": 0.4481721307296181, "grad_norm": 3.107058432155688, "learning_rate": 6.0748522010551215e-05, "loss": 0.9673, "step": 8790 }, { "epoch": 0.448681996634885, "grad_norm": 3.527942051818049, "learning_rate": 6.066786862011785e-05, "loss": 0.9556, "step": 8800 }, { "epoch": 0.4491918625401519, "grad_norm": 3.6355047391327884, "learning_rate": 6.058718613768103e-05, "loss": 1.015, "step": 8810 }, { "epoch": 0.44970172844541884, "grad_norm": 3.105033490007051, "learning_rate": 6.050647478326736e-05, "loss": 0.9699, "step": 8820 }, { "epoch": 0.45021159435068575, "grad_norm": 2.5004745560856096, "learning_rate": 6.0425734776982204e-05, "loss": 0.9851, "step": 8830 }, { "epoch": 0.45072146025595267, "grad_norm": 4.450216770629844, "learning_rate": 6.034496633900903e-05, "loss": 0.9703, "step": 8840 }, { "epoch": 0.4512313261612196, "grad_norm": 4.197061587642375, "learning_rate": 6.0264169689608886e-05, "loss": 1.0017, "step": 8850 }, { "epoch": 0.4517411920664865, "grad_norm": 2.6986562343418656, "learning_rate": 6.018334504911971e-05, "loss": 0.9502, "step": 8860 }, { "epoch": 0.4522510579717534, "grad_norm": 3.0511897830278594, "learning_rate": 6.010249263795582e-05, "loss": 0.99, "step": 8870 }, { "epoch": 0.4527609238770203, "grad_norm": 3.999753683504353, "learning_rate": 6.002161267660723e-05, "loss": 1.1003, "step": 8880 
}, { "epoch": 0.45327078978228724, "grad_norm": 4.933276773940487, "learning_rate": 5.994070538563909e-05, "loss": 1.0102, "step": 8890 }, { "epoch": 0.45378065568755416, "grad_norm": 3.629194455464756, "learning_rate": 5.985977098569111e-05, "loss": 0.9832, "step": 8900 }, { "epoch": 0.4542905215928211, "grad_norm": 3.204958560049805, "learning_rate": 5.977880969747687e-05, "loss": 0.9695, "step": 8910 }, { "epoch": 0.454800387498088, "grad_norm": 2.6867147044360586, "learning_rate": 5.969782174178333e-05, "loss": 0.9656, "step": 8920 }, { "epoch": 0.4553102534033549, "grad_norm": 3.034488103300888, "learning_rate": 5.961680733947016e-05, "loss": 1.0475, "step": 8930 }, { "epoch": 0.4558201193086218, "grad_norm": 3.5236433196919985, "learning_rate": 5.953576671146913e-05, "loss": 0.981, "step": 8940 }, { "epoch": 0.45632998521388873, "grad_norm": 2.8968669103912195, "learning_rate": 5.945470007878356e-05, "loss": 1.0058, "step": 8950 }, { "epoch": 0.45683985111915565, "grad_norm": 3.1366228767198128, "learning_rate": 5.937360766248767e-05, "loss": 0.9852, "step": 8960 }, { "epoch": 0.45734971702442256, "grad_norm": 2.897785110109335, "learning_rate": 5.9292489683725996e-05, "loss": 0.9616, "step": 8970 }, { "epoch": 0.4578595829296895, "grad_norm": 4.21341205269237, "learning_rate": 5.921134636371277e-05, "loss": 0.9881, "step": 8980 }, { "epoch": 0.4583694488349564, "grad_norm": 3.0001706706722926, "learning_rate": 5.9130177923731376e-05, "loss": 1.0046, "step": 8990 }, { "epoch": 0.4588793147402233, "grad_norm": 5.748411307948442, "learning_rate": 5.9048984585133646e-05, "loss": 0.9856, "step": 9000 }, { "epoch": 0.4593891806454902, "grad_norm": 2.9060378090177035, "learning_rate": 5.896776656933936e-05, "loss": 0.9629, "step": 9010 }, { "epoch": 0.45989904655075714, "grad_norm": 2.9862141861239335, "learning_rate": 5.8886524097835536e-05, "loss": 0.9635, "step": 9020 }, { "epoch": 0.46040891245602406, "grad_norm": 3.6461912452907668, "learning_rate": 5.880525739217596e-05, "loss": 1.0202, "step": 9030 }, { "epoch": 0.46091877836129097, "grad_norm": 2.9584589049079453, "learning_rate": 5.872396667398043e-05, "loss": 0.9016, "step": 9040 }, { "epoch": 0.4614286442665579, "grad_norm": 3.157536961051754, "learning_rate": 5.86426521649343e-05, "loss": 0.9571, "step": 9050 }, { "epoch": 0.4619385101718248, "grad_norm": 3.1010631758883243, "learning_rate": 5.856131408678776e-05, "loss": 0.9395, "step": 9060 }, { "epoch": 0.4624483760770917, "grad_norm": 2.92934162223403, "learning_rate": 5.847995266135525e-05, "loss": 0.9919, "step": 9070 }, { "epoch": 0.46295824198235863, "grad_norm": 3.1196293404058792, "learning_rate": 5.839856811051496e-05, "loss": 1.013, "step": 9080 }, { "epoch": 0.46346810788762555, "grad_norm": 4.47180888962446, "learning_rate": 5.831716065620807e-05, "loss": 0.916, "step": 9090 }, { "epoch": 0.46397797379289246, "grad_norm": 3.6209260594149937, "learning_rate": 5.8235730520438236e-05, "loss": 1.0086, "step": 9100 }, { "epoch": 0.4644878396981594, "grad_norm": 3.197672960765441, "learning_rate": 5.815427792527102e-05, "loss": 0.9278, "step": 9110 }, { "epoch": 0.4649977056034263, "grad_norm": 3.8971007511057745, "learning_rate": 5.8072803092833136e-05, "loss": 1.0436, "step": 9120 }, { "epoch": 0.4655075715086932, "grad_norm": 3.167182893353007, "learning_rate": 5.7991306245312036e-05, "loss": 0.9341, "step": 9130 }, { "epoch": 0.4660174374139601, "grad_norm": 3.540484608219236, "learning_rate": 5.7909787604955156e-05, "loss": 0.9481, "step": 9140 }, { "epoch": 
0.46652730331922704, "grad_norm": 3.074758054177519, "learning_rate": 5.782824739406938e-05, "loss": 0.9173, "step": 9150 }, { "epoch": 0.46703716922449395, "grad_norm": 3.1121036519918612, "learning_rate": 5.774668583502037e-05, "loss": 0.9751, "step": 9160 }, { "epoch": 0.46754703512976087, "grad_norm": 4.404750026090895, "learning_rate": 5.76651031502321e-05, "loss": 0.9686, "step": 9170 }, { "epoch": 0.4680569010350278, "grad_norm": 3.087422903467044, "learning_rate": 5.758349956218607e-05, "loss": 0.9619, "step": 9180 }, { "epoch": 0.4685667669402947, "grad_norm": 2.5920682404244313, "learning_rate": 5.750187529342082e-05, "loss": 0.9127, "step": 9190 }, { "epoch": 0.4690766328455616, "grad_norm": 3.2522582962238995, "learning_rate": 5.742023056653131e-05, "loss": 1.0514, "step": 9200 }, { "epoch": 0.46958649875082853, "grad_norm": 2.151734209698684, "learning_rate": 5.7338565604168236e-05, "loss": 0.911, "step": 9210 }, { "epoch": 0.47009636465609544, "grad_norm": 2.5390936845899215, "learning_rate": 5.72568806290375e-05, "loss": 1.0026, "step": 9220 }, { "epoch": 0.47060623056136236, "grad_norm": 4.168993111095817, "learning_rate": 5.717517586389961e-05, "loss": 0.9082, "step": 9230 }, { "epoch": 0.4711160964666293, "grad_norm": 3.35047102852213, "learning_rate": 5.709345153156902e-05, "loss": 0.9441, "step": 9240 }, { "epoch": 0.4716259623718962, "grad_norm": 3.4495073915702936, "learning_rate": 5.701170785491352e-05, "loss": 0.9034, "step": 9250 }, { "epoch": 0.4721358282771631, "grad_norm": 2.896625667681641, "learning_rate": 5.692994505685369e-05, "loss": 0.9081, "step": 9260 }, { "epoch": 0.47264569418243, "grad_norm": 4.579982562564233, "learning_rate": 5.684816336036224e-05, "loss": 1.0853, "step": 9270 }, { "epoch": 0.47315556008769694, "grad_norm": 4.355257890293027, "learning_rate": 5.676636298846343e-05, "loss": 0.986, "step": 9280 }, { "epoch": 0.47366542599296385, "grad_norm": 4.107352245000783, "learning_rate": 5.668454416423242e-05, "loss": 0.9698, "step": 9290 }, { "epoch": 0.47417529189823077, "grad_norm": 4.093149425772111, "learning_rate": 5.660270711079474e-05, "loss": 0.9642, "step": 9300 }, { "epoch": 0.4746851578034977, "grad_norm": 5.297660098419542, "learning_rate": 5.652085205132558e-05, "loss": 0.9668, "step": 9310 }, { "epoch": 0.4751950237087646, "grad_norm": 4.72644032568067, "learning_rate": 5.643897920904926e-05, "loss": 0.9844, "step": 9320 }, { "epoch": 0.4757048896140315, "grad_norm": 3.7301453079447784, "learning_rate": 5.635708880723858e-05, "loss": 0.9383, "step": 9330 }, { "epoch": 0.4762147555192984, "grad_norm": 3.0814586247871993, "learning_rate": 5.627518106921425e-05, "loss": 0.9842, "step": 9340 }, { "epoch": 0.47672462142456534, "grad_norm": 5.42698297896619, "learning_rate": 5.6193256218344246e-05, "loss": 0.9641, "step": 9350 }, { "epoch": 0.47723448732983226, "grad_norm": 5.152280972858124, "learning_rate": 5.611131447804321e-05, "loss": 0.9671, "step": 9360 }, { "epoch": 0.4777443532350992, "grad_norm": 4.162051120491686, "learning_rate": 5.602935607177182e-05, "loss": 0.9838, "step": 9370 }, { "epoch": 0.4782542191403661, "grad_norm": 3.559590998622765, "learning_rate": 5.5947381223036244e-05, "loss": 1.0307, "step": 9380 }, { "epoch": 0.478764085045633, "grad_norm": 4.125612062475158, "learning_rate": 5.5865390155387484e-05, "loss": 0.9813, "step": 9390 }, { "epoch": 0.4792739509508999, "grad_norm": 3.2131711610681792, "learning_rate": 5.5783383092420746e-05, "loss": 0.8639, "step": 9400 }, { "epoch": 0.47978381685616683, 
"grad_norm": 3.2969378581814586, "learning_rate": 5.5701360257774846e-05, "loss": 0.9565, "step": 9410 }, { "epoch": 0.48029368276143375, "grad_norm": 3.1462897733941895, "learning_rate": 5.561932187513168e-05, "loss": 0.9644, "step": 9420 }, { "epoch": 0.48080354866670066, "grad_norm": 3.4176249243038344, "learning_rate": 5.553726816821547e-05, "loss": 0.9881, "step": 9430 }, { "epoch": 0.4813134145719676, "grad_norm": 2.484582987435529, "learning_rate": 5.5455199360792276e-05, "loss": 0.9131, "step": 9440 }, { "epoch": 0.4818232804772345, "grad_norm": 4.046107903525553, "learning_rate": 5.5373115676669286e-05, "loss": 1.027, "step": 9450 }, { "epoch": 0.4823331463825014, "grad_norm": 2.7781539123813856, "learning_rate": 5.529101733969432e-05, "loss": 0.9215, "step": 9460 }, { "epoch": 0.4828430122877683, "grad_norm": 4.07554206961546, "learning_rate": 5.5208904573755104e-05, "loss": 0.9881, "step": 9470 }, { "epoch": 0.48335287819303524, "grad_norm": 3.3271161274855476, "learning_rate": 5.5126777602778765e-05, "loss": 0.9697, "step": 9480 }, { "epoch": 0.48386274409830216, "grad_norm": 2.6603039051771638, "learning_rate": 5.504463665073112e-05, "loss": 0.9197, "step": 9490 }, { "epoch": 0.48437261000356907, "grad_norm": 2.9061379748402443, "learning_rate": 5.496248194161615e-05, "loss": 0.9588, "step": 9500 }, { "epoch": 0.484882475908836, "grad_norm": 3.3355590411210168, "learning_rate": 5.4880313699475314e-05, "loss": 1.036, "step": 9510 }, { "epoch": 0.4853923418141029, "grad_norm": 3.451459158559493, "learning_rate": 5.4798132148387013e-05, "loss": 1.0624, "step": 9520 }, { "epoch": 0.4859022077193698, "grad_norm": 2.3258525703065063, "learning_rate": 5.471593751246592e-05, "loss": 0.9248, "step": 9530 }, { "epoch": 0.48641207362463673, "grad_norm": 2.8863302547945318, "learning_rate": 5.4633730015862414e-05, "loss": 0.9438, "step": 9540 }, { "epoch": 0.48692193952990365, "grad_norm": 3.592383771973474, "learning_rate": 5.455150988276192e-05, "loss": 0.9935, "step": 9550 }, { "epoch": 0.48743180543517056, "grad_norm": 3.5051333487356624, "learning_rate": 5.446927733738433e-05, "loss": 0.9247, "step": 9560 }, { "epoch": 0.4879416713404375, "grad_norm": 2.6807353943018435, "learning_rate": 5.43870326039834e-05, "loss": 0.9739, "step": 9570 }, { "epoch": 0.4884515372457044, "grad_norm": 4.305074246806519, "learning_rate": 5.43047759068461e-05, "loss": 0.9822, "step": 9580 }, { "epoch": 0.4889614031509713, "grad_norm": 2.354641769723023, "learning_rate": 5.4222507470292036e-05, "loss": 0.8612, "step": 9590 }, { "epoch": 0.4894712690562382, "grad_norm": 2.965498591324047, "learning_rate": 5.414022751867285e-05, "loss": 0.9204, "step": 9600 }, { "epoch": 0.48998113496150514, "grad_norm": 6.3205482558893005, "learning_rate": 5.4057936276371565e-05, "loss": 0.9709, "step": 9610 }, { "epoch": 0.49049100086677205, "grad_norm": 2.010926865518373, "learning_rate": 5.3975633967801986e-05, "loss": 0.9492, "step": 9620 }, { "epoch": 0.49100086677203897, "grad_norm": 3.452431723535906, "learning_rate": 5.389332081740812e-05, "loss": 0.9015, "step": 9630 }, { "epoch": 0.4915107326773059, "grad_norm": 5.034933917396801, "learning_rate": 5.381099704966352e-05, "loss": 0.9986, "step": 9640 }, { "epoch": 0.4920205985825728, "grad_norm": 4.612910015735674, "learning_rate": 5.372866288907069e-05, "loss": 0.959, "step": 9650 }, { "epoch": 0.4925304644878397, "grad_norm": 3.5663763294504522, "learning_rate": 5.364631856016051e-05, "loss": 0.9167, "step": 9660 }, { "epoch": 0.49304033039310663, "grad_norm": 
2.7699815026050114, "learning_rate": 5.356396428749155e-05, "loss": 0.9322, "step": 9670 }, { "epoch": 0.49355019629837354, "grad_norm": 5.781944370641614, "learning_rate": 5.3481600295649525e-05, "loss": 0.9052, "step": 9680 }, { "epoch": 0.49406006220364046, "grad_norm": 2.895890141583381, "learning_rate": 5.339922680924664e-05, "loss": 1.0033, "step": 9690 }, { "epoch": 0.4945699281089074, "grad_norm": 2.639129524416714, "learning_rate": 5.331684405292099e-05, "loss": 0.9316, "step": 9700 }, { "epoch": 0.4950797940141743, "grad_norm": 3.8489717935083454, "learning_rate": 5.3234452251335955e-05, "loss": 0.978, "step": 9710 }, { "epoch": 0.4955896599194412, "grad_norm": 4.641154634386726, "learning_rate": 5.31520516291796e-05, "loss": 0.9366, "step": 9720 }, { "epoch": 0.4960995258247081, "grad_norm": 4.1945884250295125, "learning_rate": 5.306964241116402e-05, "loss": 0.9642, "step": 9730 }, { "epoch": 0.49660939172997504, "grad_norm": 3.804927333278639, "learning_rate": 5.298722482202475e-05, "loss": 0.9141, "step": 9740 }, { "epoch": 0.49711925763524195, "grad_norm": 3.3137262873679245, "learning_rate": 5.290479908652017e-05, "loss": 0.9532, "step": 9750 }, { "epoch": 0.49762912354050887, "grad_norm": 3.598504231825922, "learning_rate": 5.2822365429430866e-05, "loss": 0.913, "step": 9760 }, { "epoch": 0.4981389894457758, "grad_norm": 2.922676556869131, "learning_rate": 5.2739924075559024e-05, "loss": 0.9417, "step": 9770 }, { "epoch": 0.4986488553510427, "grad_norm": 4.35078006858887, "learning_rate": 5.265747524972784e-05, "loss": 0.9719, "step": 9780 }, { "epoch": 0.4991587212563096, "grad_norm": 2.5548950063605846, "learning_rate": 5.257501917678085e-05, "loss": 0.9239, "step": 9790 }, { "epoch": 0.4996685871615765, "grad_norm": 6.836250183279308, "learning_rate": 5.249255608158139e-05, "loss": 0.9607, "step": 9800 }, { "epoch": 0.5001784530668434, "grad_norm": 2.9653175822252567, "learning_rate": 5.241008618901192e-05, "loss": 0.9716, "step": 9810 }, { "epoch": 0.5006883189721103, "grad_norm": 3.734122705555406, "learning_rate": 5.232760972397347e-05, "loss": 0.9289, "step": 9820 }, { "epoch": 0.5011981848773772, "grad_norm": 2.469867186688575, "learning_rate": 5.224512691138494e-05, "loss": 0.9609, "step": 9830 }, { "epoch": 0.5017080507826441, "grad_norm": 3.5526576361576017, "learning_rate": 5.2162637976182584e-05, "loss": 0.9721, "step": 9840 }, { "epoch": 0.502217916687911, "grad_norm": 3.9133632073025635, "learning_rate": 5.208014314331935e-05, "loss": 0.9317, "step": 9850 }, { "epoch": 0.502727782593178, "grad_norm": 3.2917362546113615, "learning_rate": 5.1997642637764255e-05, "loss": 0.9953, "step": 9860 }, { "epoch": 0.5032376484984449, "grad_norm": 4.711424755994914, "learning_rate": 5.191513668450178e-05, "loss": 0.9486, "step": 9870 }, { "epoch": 0.5037475144037118, "grad_norm": 2.321341822761805, "learning_rate": 5.183262550853127e-05, "loss": 0.9446, "step": 9880 }, { "epoch": 0.5042573803089787, "grad_norm": 2.786223507865711, "learning_rate": 5.175010933486634e-05, "loss": 0.9548, "step": 9890 }, { "epoch": 0.5047672462142456, "grad_norm": 3.6083918636921695, "learning_rate": 5.166758838853418e-05, "loss": 0.9483, "step": 9900 }, { "epoch": 0.5052771121195125, "grad_norm": 2.3338798016112023, "learning_rate": 5.1585062894575034e-05, "loss": 0.9208, "step": 9910 }, { "epoch": 0.5057869780247795, "grad_norm": 2.6959985130224955, "learning_rate": 5.150253307804156e-05, "loss": 0.8596, "step": 9920 }, { "epoch": 0.5062968439300464, "grad_norm": 3.5257109643018723, 
"learning_rate": 5.141999916399816e-05, "loss": 0.97, "step": 9930 }, { "epoch": 0.5068067098353133, "grad_norm": 3.082519782660571, "learning_rate": 5.133746137752044e-05, "loss": 0.9658, "step": 9940 }, { "epoch": 0.5073165757405802, "grad_norm": 2.561134174703815, "learning_rate": 5.1254919943694526e-05, "loss": 0.9268, "step": 9950 }, { "epoch": 0.5078264416458471, "grad_norm": 3.7181675274604706, "learning_rate": 5.1172375087616576e-05, "loss": 0.9573, "step": 9960 }, { "epoch": 0.508336307551114, "grad_norm": 2.795604150973193, "learning_rate": 5.108982703439201e-05, "loss": 1.0011, "step": 9970 }, { "epoch": 0.508846173456381, "grad_norm": 2.9016638978849163, "learning_rate": 5.1007276009134976e-05, "loss": 0.9783, "step": 9980 }, { "epoch": 0.5093560393616479, "grad_norm": 2.7194359601726084, "learning_rate": 5.092472223696774e-05, "loss": 0.9422, "step": 9990 }, { "epoch": 0.5098659052669148, "grad_norm": 4.318325714358832, "learning_rate": 5.0842165943020046e-05, "loss": 1.0178, "step": 10000 }, { "epoch": 0.5103757711721817, "grad_norm": 3.6852911837265285, "learning_rate": 5.075960735242854e-05, "loss": 0.9389, "step": 10010 }, { "epoch": 0.5108856370774486, "grad_norm": 2.795966314706265, "learning_rate": 5.0677046690336096e-05, "loss": 0.9737, "step": 10020 }, { "epoch": 0.5113955029827155, "grad_norm": 2.7930043328638523, "learning_rate": 5.059448418189128e-05, "loss": 0.9496, "step": 10030 }, { "epoch": 0.5119053688879824, "grad_norm": 3.062256031105531, "learning_rate": 5.051192005224765e-05, "loss": 0.9023, "step": 10040 }, { "epoch": 0.5124152347932494, "grad_norm": 6.780172533538299, "learning_rate": 5.042935452656322e-05, "loss": 1.0067, "step": 10050 }, { "epoch": 0.5129251006985163, "grad_norm": 2.776353296984024, "learning_rate": 5.034678782999979e-05, "loss": 0.9937, "step": 10060 }, { "epoch": 0.5134349666037832, "grad_norm": 3.3365394053880335, "learning_rate": 5.026422018772234e-05, "loss": 0.8942, "step": 10070 }, { "epoch": 0.5139448325090501, "grad_norm": 2.3409195569526515, "learning_rate": 5.0181651824898466e-05, "loss": 0.9252, "step": 10080 }, { "epoch": 0.514454698414317, "grad_norm": 3.879946089564467, "learning_rate": 5.009908296669772e-05, "loss": 0.921, "step": 10090 }, { "epoch": 0.5149645643195839, "grad_norm": 5.206276131399787, "learning_rate": 5.001651383829098e-05, "loss": 0.9434, "step": 10100 }, { "epoch": 0.5154744302248508, "grad_norm": 3.240935073507238, "learning_rate": 4.993394466484987e-05, "loss": 0.9842, "step": 10110 }, { "epoch": 0.5159842961301178, "grad_norm": 5.4587787978074935, "learning_rate": 4.985137567154616e-05, "loss": 0.9391, "step": 10120 }, { "epoch": 0.5164941620353847, "grad_norm": 3.84414031953169, "learning_rate": 4.97688070835511e-05, "loss": 0.9602, "step": 10130 }, { "epoch": 0.5170040279406516, "grad_norm": 4.705896233937508, "learning_rate": 4.968623912603485e-05, "loss": 0.9239, "step": 10140 }, { "epoch": 0.5175138938459185, "grad_norm": 4.205557321794439, "learning_rate": 4.960367202416587e-05, "loss": 0.9746, "step": 10150 }, { "epoch": 0.5180237597511854, "grad_norm": 2.744594544456746, "learning_rate": 4.9521106003110216e-05, "loss": 0.9, "step": 10160 }, { "epoch": 0.5185336256564523, "grad_norm": 2.2880730895604975, "learning_rate": 4.943854128803106e-05, "loss": 0.9323, "step": 10170 }, { "epoch": 0.5190434915617193, "grad_norm": 4.541253564776076, "learning_rate": 4.935597810408804e-05, "loss": 1.0212, "step": 10180 }, { "epoch": 0.5195533574669862, "grad_norm": 3.317522442961713, "learning_rate": 
4.9273416676436526e-05, "loss": 0.9651, "step": 10190 }, { "epoch": 0.5200632233722531, "grad_norm": 4.626442522093559, "learning_rate": 4.9190857230227186e-05, "loss": 0.9401, "step": 10200 }, { "epoch": 0.52057308927752, "grad_norm": 4.70036240305505, "learning_rate": 4.9108299990605204e-05, "loss": 0.9635, "step": 10210 }, { "epoch": 0.5210829551827869, "grad_norm": 2.761068503299125, "learning_rate": 4.902574518270982e-05, "loss": 0.9852, "step": 10220 }, { "epoch": 0.5215928210880538, "grad_norm": 2.656996733011883, "learning_rate": 4.894319303167359e-05, "loss": 0.88, "step": 10230 }, { "epoch": 0.5221026869933207, "grad_norm": 4.221748168653935, "learning_rate": 4.8860643762621854e-05, "loss": 1.0218, "step": 10240 }, { "epoch": 0.5226125528985877, "grad_norm": 3.03526122402303, "learning_rate": 4.877809760067211e-05, "loss": 0.8784, "step": 10250 }, { "epoch": 0.5231224188038546, "grad_norm": 3.2804201533463493, "learning_rate": 4.869555477093332e-05, "loss": 0.9657, "step": 10260 }, { "epoch": 0.5236322847091215, "grad_norm": 3.7269652474464205, "learning_rate": 4.861301549850543e-05, "loss": 0.9414, "step": 10270 }, { "epoch": 0.5241421506143884, "grad_norm": 4.796750509047648, "learning_rate": 4.8530480008478614e-05, "loss": 0.9575, "step": 10280 }, { "epoch": 0.5246520165196553, "grad_norm": 4.519107152482081, "learning_rate": 4.8447948525932785e-05, "loss": 0.9862, "step": 10290 }, { "epoch": 0.5251618824249222, "grad_norm": 3.426639454230538, "learning_rate": 4.836542127593693e-05, "loss": 0.9354, "step": 10300 }, { "epoch": 0.5256717483301891, "grad_norm": 3.5190702566578005, "learning_rate": 4.8282898483548436e-05, "loss": 0.8566, "step": 10310 }, { "epoch": 0.5261816142354561, "grad_norm": 2.6438908663160707, "learning_rate": 4.8200380373812606e-05, "loss": 0.9285, "step": 10320 }, { "epoch": 0.526691480140723, "grad_norm": 2.754499762740791, "learning_rate": 4.8117867171761916e-05, "loss": 0.8905, "step": 10330 }, { "epoch": 0.5272013460459899, "grad_norm": 3.4618334631287504, "learning_rate": 4.803535910241551e-05, "loss": 0.9515, "step": 10340 }, { "epoch": 0.5277112119512568, "grad_norm": 3.448601636720122, "learning_rate": 4.7952856390778466e-05, "loss": 0.8888, "step": 10350 }, { "epoch": 0.5282210778565237, "grad_norm": 3.8156484134863065, "learning_rate": 4.787035926184131e-05, "loss": 0.9402, "step": 10360 }, { "epoch": 0.5287309437617906, "grad_norm": 3.5299280212248156, "learning_rate": 4.7787867940579354e-05, "loss": 0.9026, "step": 10370 }, { "epoch": 0.5292408096670576, "grad_norm": 4.2496355214292185, "learning_rate": 4.770538265195199e-05, "loss": 0.9967, "step": 10380 }, { "epoch": 0.5297506755723245, "grad_norm": 3.0275611877217057, "learning_rate": 4.7622903620902265e-05, "loss": 0.9648, "step": 10390 }, { "epoch": 0.5302605414775914, "grad_norm": 3.2138186040832246, "learning_rate": 4.7540431072356056e-05, "loss": 0.9205, "step": 10400 }, { "epoch": 0.5307704073828583, "grad_norm": 3.006892944145669, "learning_rate": 4.7457965231221634e-05, "loss": 1.0059, "step": 10410 }, { "epoch": 0.5312802732881252, "grad_norm": 2.5735484280254424, "learning_rate": 4.737550632238899e-05, "loss": 0.9398, "step": 10420 }, { "epoch": 0.5317901391933921, "grad_norm": 2.717086537919467, "learning_rate": 4.729305457072913e-05, "loss": 0.8851, "step": 10430 }, { "epoch": 0.532300005098659, "grad_norm": 3.211894198936536, "learning_rate": 4.7210610201093617e-05, "loss": 0.9396, "step": 10440 }, { "epoch": 0.532809871003926, "grad_norm": 4.919270004384732, "learning_rate": 
4.712817343831384e-05, "loss": 0.9608, "step": 10450 }, { "epoch": 0.5333197369091929, "grad_norm": 2.397973165692253, "learning_rate": 4.7045744507200486e-05, "loss": 0.9257, "step": 10460 }, { "epoch": 0.5338296028144598, "grad_norm": 2.8041603830628987, "learning_rate": 4.696332363254282e-05, "loss": 0.9581, "step": 10470 }, { "epoch": 0.5343394687197267, "grad_norm": 7.021143848255496, "learning_rate": 4.688091103910819e-05, "loss": 0.9404, "step": 10480 }, { "epoch": 0.5348493346249936, "grad_norm": 2.0721120953637646, "learning_rate": 4.679850695164138e-05, "loss": 0.8397, "step": 10490 }, { "epoch": 0.5353592005302605, "grad_norm": 2.520850181356826, "learning_rate": 4.671611159486389e-05, "loss": 0.9275, "step": 10500 }, { "epoch": 0.5358690664355275, "grad_norm": 2.5231701025343245, "learning_rate": 4.66337251934735e-05, "loss": 0.951, "step": 10510 }, { "epoch": 0.5363789323407944, "grad_norm": 3.8940168673136784, "learning_rate": 4.65513479721435e-05, "loss": 0.9202, "step": 10520 }, { "epoch": 0.5368887982460613, "grad_norm": 2.5217152560237737, "learning_rate": 4.646898015552219e-05, "loss": 0.8935, "step": 10530 }, { "epoch": 0.5373986641513282, "grad_norm": 2.633379640768002, "learning_rate": 4.638662196823223e-05, "loss": 0.9552, "step": 10540 }, { "epoch": 0.5379085300565951, "grad_norm": 3.610584731612508, "learning_rate": 4.630427363486996e-05, "loss": 0.9558, "step": 10550 }, { "epoch": 0.538418395961862, "grad_norm": 3.6451579333644255, "learning_rate": 4.622193538000494e-05, "loss": 0.959, "step": 10560 }, { "epoch": 0.5389282618671289, "grad_norm": 2.73070710265327, "learning_rate": 4.613960742817914e-05, "loss": 0.9415, "step": 10570 }, { "epoch": 0.5394381277723959, "grad_norm": 3.2767593604993404, "learning_rate": 4.6057290003906526e-05, "loss": 0.8898, "step": 10580 }, { "epoch": 0.5399479936776628, "grad_norm": 3.4661086987132634, "learning_rate": 4.597498333167227e-05, "loss": 0.9227, "step": 10590 }, { "epoch": 0.5404578595829297, "grad_norm": 3.233418939753378, "learning_rate": 4.58926876359323e-05, "loss": 0.9685, "step": 10600 }, { "epoch": 0.5409677254881966, "grad_norm": 3.1363887735424933, "learning_rate": 4.581040314111259e-05, "loss": 0.9405, "step": 10610 }, { "epoch": 0.5414775913934635, "grad_norm": 2.83491402664726, "learning_rate": 4.5728130071608506e-05, "loss": 0.9299, "step": 10620 }, { "epoch": 0.5419874572987304, "grad_norm": 6.002022657603547, "learning_rate": 4.5645868651784354e-05, "loss": 0.9689, "step": 10630 }, { "epoch": 0.5424973232039974, "grad_norm": 3.247394883373861, "learning_rate": 4.5563619105972594e-05, "loss": 0.9417, "step": 10640 }, { "epoch": 0.5430071891092643, "grad_norm": 3.1962757505757726, "learning_rate": 4.5481381658473346e-05, "loss": 0.9484, "step": 10650 }, { "epoch": 0.5435170550145312, "grad_norm": 3.64320500753679, "learning_rate": 4.53991565335537e-05, "loss": 0.9515, "step": 10660 }, { "epoch": 0.5440269209197981, "grad_norm": 3.7059387220241513, "learning_rate": 4.531694395544719e-05, "loss": 0.9219, "step": 10670 }, { "epoch": 0.544536786825065, "grad_norm": 3.375884864641275, "learning_rate": 4.523474414835313e-05, "loss": 0.9091, "step": 10680 }, { "epoch": 0.5450466527303319, "grad_norm": 3.5558570711149344, "learning_rate": 4.515255733643593e-05, "loss": 0.9136, "step": 10690 }, { "epoch": 0.5455565186355988, "grad_norm": 4.808820482356941, "learning_rate": 4.5070383743824674e-05, "loss": 0.9184, "step": 10700 }, { "epoch": 0.5460663845408658, "grad_norm": 2.6218298124518062, "learning_rate": 
4.498822359461229e-05, "loss": 0.9326, "step": 10710 }, { "epoch": 0.5465762504461327, "grad_norm": 2.42543674839897, "learning_rate": 4.4906077112855116e-05, "loss": 1.0058, "step": 10720 }, { "epoch": 0.5470861163513996, "grad_norm": 3.1361509539515393, "learning_rate": 4.4823944522572216e-05, "loss": 0.8866, "step": 10730 }, { "epoch": 0.5475959822566665, "grad_norm": 3.6852645001813085, "learning_rate": 4.474182604774471e-05, "loss": 1.0186, "step": 10740 }, { "epoch": 0.5481058481619334, "grad_norm": 3.4131327708149852, "learning_rate": 4.465972191231531e-05, "loss": 0.9117, "step": 10750 }, { "epoch": 0.5486157140672003, "grad_norm": 3.191893783018942, "learning_rate": 4.457763234018753e-05, "loss": 0.9349, "step": 10760 }, { "epoch": 0.5491255799724672, "grad_norm": 2.544643243504104, "learning_rate": 4.4495557555225276e-05, "loss": 0.8535, "step": 10770 }, { "epoch": 0.5496354458777342, "grad_norm": 2.51958458544114, "learning_rate": 4.441349778125201e-05, "loss": 0.9941, "step": 10780 }, { "epoch": 0.5501453117830011, "grad_norm": 4.107901228678458, "learning_rate": 4.433145324205035e-05, "loss": 0.9923, "step": 10790 }, { "epoch": 0.550655177688268, "grad_norm": 3.471936059851544, "learning_rate": 4.424942416136136e-05, "loss": 0.9084, "step": 10800 }, { "epoch": 0.5511650435935349, "grad_norm": 4.483957358898498, "learning_rate": 4.4167410762883863e-05, "loss": 0.9059, "step": 10810 }, { "epoch": 0.5516749094988018, "grad_norm": 2.6148774956085195, "learning_rate": 4.408541327027404e-05, "loss": 0.9014, "step": 10820 }, { "epoch": 0.5521847754040687, "grad_norm": 2.5513218383095064, "learning_rate": 4.4003431907144576e-05, "loss": 0.9092, "step": 10830 }, { "epoch": 0.5526946413093357, "grad_norm": 3.2935364006275756, "learning_rate": 4.392146689706425e-05, "loss": 0.886, "step": 10840 }, { "epoch": 0.5532045072146026, "grad_norm": 8.626314413796218, "learning_rate": 4.3839518463557234e-05, "loss": 0.9992, "step": 10850 }, { "epoch": 0.5537143731198695, "grad_norm": 4.180141893470191, "learning_rate": 4.3757586830102464e-05, "loss": 0.9177, "step": 10860 }, { "epoch": 0.5542242390251364, "grad_norm": 3.7520943854550435, "learning_rate": 4.3675672220133085e-05, "loss": 0.9843, "step": 10870 }, { "epoch": 0.5547341049304033, "grad_norm": 3.529144886650154, "learning_rate": 4.3593774857035795e-05, "loss": 1.0294, "step": 10880 }, { "epoch": 0.5552439708356702, "grad_norm": 3.8101858885076125, "learning_rate": 4.351189496415031e-05, "loss": 0.8928, "step": 10890 }, { "epoch": 0.5557538367409371, "grad_norm": 4.248885275921915, "learning_rate": 4.343003276476862e-05, "loss": 0.952, "step": 10900 }, { "epoch": 0.5562637026462041, "grad_norm": 3.213356356055621, "learning_rate": 4.334818848213455e-05, "loss": 0.8553, "step": 10910 }, { "epoch": 0.556773568551471, "grad_norm": 3.121971553205506, "learning_rate": 4.326636233944303e-05, "loss": 0.9389, "step": 10920 }, { "epoch": 0.5572834344567379, "grad_norm": 2.2812367647348135, "learning_rate": 4.318455455983949e-05, "loss": 0.9203, "step": 10930 }, { "epoch": 0.5577933003620048, "grad_norm": 2.7159153450702718, "learning_rate": 4.310276536641932e-05, "loss": 0.939, "step": 10940 }, { "epoch": 0.5583031662672717, "grad_norm": 3.606694364467837, "learning_rate": 4.302099498222722e-05, "loss": 1.0178, "step": 10950 }, { "epoch": 0.5588130321725386, "grad_norm": 5.270098055897755, "learning_rate": 4.2939243630256584e-05, "loss": 0.987, "step": 10960 }, { "epoch": 0.5593228980778056, "grad_norm": 3.4379590615846762, "learning_rate": 
4.285751153344892e-05, "loss": 0.9356, "step": 10970 }, { "epoch": 0.5598327639830725, "grad_norm": 3.3452241656556767, "learning_rate": 4.27757989146932e-05, "loss": 0.9208, "step": 10980 }, { "epoch": 0.5603426298883394, "grad_norm": 3.4990502423669043, "learning_rate": 4.2694105996825325e-05, "loss": 0.935, "step": 10990 }, { "epoch": 0.5608524957936063, "grad_norm": 3.3464667008325955, "learning_rate": 4.2612433002627404e-05, "loss": 0.958, "step": 11000 }, { "epoch": 0.5613623616988732, "grad_norm": 3.577841867130007, "learning_rate": 4.2530780154827284e-05, "loss": 0.8888, "step": 11010 }, { "epoch": 0.5618722276041401, "grad_norm": 2.919441037595988, "learning_rate": 4.244914767609779e-05, "loss": 0.927, "step": 11020 }, { "epoch": 0.562382093509407, "grad_norm": 2.754880039651409, "learning_rate": 4.236753578905627e-05, "loss": 0.9013, "step": 11030 }, { "epoch": 0.562891959414674, "grad_norm": 3.2188275287215142, "learning_rate": 4.228594471626392e-05, "loss": 0.9431, "step": 11040 }, { "epoch": 0.5634018253199409, "grad_norm": 3.1991664938547992, "learning_rate": 4.22043746802251e-05, "loss": 0.8902, "step": 11050 }, { "epoch": 0.5639116912252078, "grad_norm": 3.1340245290870055, "learning_rate": 4.2122825903386874e-05, "loss": 0.9363, "step": 11060 }, { "epoch": 0.5644215571304747, "grad_norm": 2.499072366200033, "learning_rate": 4.204129860813828e-05, "loss": 0.8899, "step": 11070 }, { "epoch": 0.5649314230357416, "grad_norm": 3.367362167872141, "learning_rate": 4.1959793016809835e-05, "loss": 0.9268, "step": 11080 }, { "epoch": 0.5654412889410085, "grad_norm": 4.055317211034551, "learning_rate": 4.187830935167281e-05, "loss": 0.8924, "step": 11090 }, { "epoch": 0.5659511548462755, "grad_norm": 3.708789845615069, "learning_rate": 4.1796847834938694e-05, "loss": 0.9433, "step": 11100 }, { "epoch": 0.5664610207515424, "grad_norm": 3.342070976615128, "learning_rate": 4.171540868875862e-05, "loss": 0.8593, "step": 11110 }, { "epoch": 0.5669708866568093, "grad_norm": 2.8712579947880887, "learning_rate": 4.163399213522263e-05, "loss": 0.932, "step": 11120 }, { "epoch": 0.5674807525620762, "grad_norm": 4.6773806382841965, "learning_rate": 4.155259839635925e-05, "loss": 1.0471, "step": 11130 }, { "epoch": 0.5679906184673431, "grad_norm": 3.078974731646656, "learning_rate": 4.147122769413471e-05, "loss": 0.9242, "step": 11140 }, { "epoch": 0.56850048437261, "grad_norm": 6.231642085771952, "learning_rate": 4.138988025045245e-05, "loss": 0.9739, "step": 11150 }, { "epoch": 0.5690103502778769, "grad_norm": 3.0839405522612067, "learning_rate": 4.1308556287152523e-05, "loss": 0.9526, "step": 11160 }, { "epoch": 0.5695202161831439, "grad_norm": 3.7497699141702743, "learning_rate": 4.1227256026010875e-05, "loss": 0.9777, "step": 11170 }, { "epoch": 0.5700300820884108, "grad_norm": 3.9862229817529444, "learning_rate": 4.114597968873886e-05, "loss": 0.9009, "step": 11180 }, { "epoch": 0.5705399479936777, "grad_norm": 3.891982808605384, "learning_rate": 4.106472749698256e-05, "loss": 0.9026, "step": 11190 }, { "epoch": 0.5710498138989446, "grad_norm": 3.3862676151861586, "learning_rate": 4.098349967232225e-05, "loss": 0.9402, "step": 11200 }, { "epoch": 0.5715596798042115, "grad_norm": 3.306179449922897, "learning_rate": 4.090229643627176e-05, "loss": 0.9188, "step": 11210 }, { "epoch": 0.5720695457094784, "grad_norm": 2.525184577226416, "learning_rate": 4.082111801027778e-05, "loss": 0.8973, "step": 11220 }, { "epoch": 0.5725794116147453, "grad_norm": 2.653011738044738, "learning_rate": 
4.073996461571945e-05, "loss": 0.9313, "step": 11230 }, { "epoch": 0.5730892775200123, "grad_norm": 2.2431306141496155, "learning_rate": 4.065883647390756e-05, "loss": 0.8939, "step": 11240 }, { "epoch": 0.5735991434252792, "grad_norm": 4.048488570656739, "learning_rate": 4.057773380608411e-05, "loss": 0.9402, "step": 11250 }, { "epoch": 0.5741090093305461, "grad_norm": 4.173730329774634, "learning_rate": 4.049665683342154e-05, "loss": 0.9531, "step": 11260 }, { "epoch": 0.574618875235813, "grad_norm": 2.5522114553016038, "learning_rate": 4.0415605777022306e-05, "loss": 0.9363, "step": 11270 }, { "epoch": 0.5751287411410799, "grad_norm": 2.2293996195235084, "learning_rate": 4.0334580857918165e-05, "loss": 0.8969, "step": 11280 }, { "epoch": 0.5756386070463468, "grad_norm": 4.178070132359859, "learning_rate": 4.025358229706955e-05, "loss": 0.9183, "step": 11290 }, { "epoch": 0.5761484729516138, "grad_norm": 3.1278468887729454, "learning_rate": 4.017261031536509e-05, "loss": 0.86, "step": 11300 }, { "epoch": 0.5766583388568807, "grad_norm": 3.7707189328781254, "learning_rate": 4.0091665133620835e-05, "loss": 0.9194, "step": 11310 }, { "epoch": 0.5771682047621476, "grad_norm": 3.610050800355324, "learning_rate": 4.001074697257986e-05, "loss": 0.9452, "step": 11320 }, { "epoch": 0.5776780706674145, "grad_norm": 2.6631250167415543, "learning_rate": 3.992985605291143e-05, "loss": 0.8058, "step": 11330 }, { "epoch": 0.5781879365726814, "grad_norm": 3.16186610328534, "learning_rate": 3.9848992595210624e-05, "loss": 0.922, "step": 11340 }, { "epoch": 0.5786978024779483, "grad_norm": 3.013990161069991, "learning_rate": 3.976815681999762e-05, "loss": 0.8733, "step": 11350 }, { "epoch": 0.5792076683832152, "grad_norm": 3.0182525666443234, "learning_rate": 3.968734894771701e-05, "loss": 0.9217, "step": 11360 }, { "epoch": 0.5797175342884822, "grad_norm": 3.680891120799879, "learning_rate": 3.9606569198737406e-05, "loss": 0.9184, "step": 11370 }, { "epoch": 0.5802274001937491, "grad_norm": 3.9826156164741393, "learning_rate": 3.952581779335066e-05, "loss": 0.9767, "step": 11380 }, { "epoch": 0.580737266099016, "grad_norm": 4.453625045000694, "learning_rate": 3.9445094951771366e-05, "loss": 0.9414, "step": 11390 }, { "epoch": 0.5812471320042829, "grad_norm": 3.625764242947431, "learning_rate": 3.936440089413619e-05, "loss": 0.9542, "step": 11400 }, { "epoch": 0.5817569979095498, "grad_norm": 2.6821388085917484, "learning_rate": 3.928373584050331e-05, "loss": 0.9525, "step": 11410 }, { "epoch": 0.5822668638148167, "grad_norm": 5.108250606198265, "learning_rate": 3.9203100010851845e-05, "loss": 0.8815, "step": 11420 }, { "epoch": 0.5827767297200837, "grad_norm": 2.3673563653919736, "learning_rate": 3.912249362508115e-05, "loss": 0.9805, "step": 11430 }, { "epoch": 0.5832865956253506, "grad_norm": 3.253477854934498, "learning_rate": 3.904191690301036e-05, "loss": 0.873, "step": 11440 }, { "epoch": 0.5837964615306175, "grad_norm": 3.2533349136746454, "learning_rate": 3.8961370064377635e-05, "loss": 0.9263, "step": 11450 }, { "epoch": 0.5843063274358844, "grad_norm": 3.1266975497458867, "learning_rate": 3.888085332883971e-05, "loss": 0.9283, "step": 11460 }, { "epoch": 0.5848161933411513, "grad_norm": 2.6265344858376527, "learning_rate": 3.880036691597122e-05, "loss": 0.9028, "step": 11470 }, { "epoch": 0.5853260592464182, "grad_norm": 2.1547246508003832, "learning_rate": 3.871991104526404e-05, "loss": 0.9107, "step": 11480 }, { "epoch": 0.5858359251516851, "grad_norm": 2.933037667488873, "learning_rate": 
3.863948593612685e-05, "loss": 0.8794, "step": 11490 }, { "epoch": 0.5863457910569521, "grad_norm": 2.8171298551717947, "learning_rate": 3.855909180788435e-05, "loss": 0.9344, "step": 11500 }, { "epoch": 0.586855656962219, "grad_norm": 4.256605327236434, "learning_rate": 3.847872887977682e-05, "loss": 0.9252, "step": 11510 }, { "epoch": 0.5873655228674859, "grad_norm": 4.920767140524973, "learning_rate": 3.839839737095946e-05, "loss": 0.9089, "step": 11520 }, { "epoch": 0.5878753887727528, "grad_norm": 3.7727525937676907, "learning_rate": 3.8318097500501706e-05, "loss": 0.9241, "step": 11530 }, { "epoch": 0.5883852546780197, "grad_norm": 2.57676433348543, "learning_rate": 3.823782948738682e-05, "loss": 0.9842, "step": 11540 }, { "epoch": 0.5888951205832866, "grad_norm": 5.744358407957162, "learning_rate": 3.8157593550511076e-05, "loss": 0.9248, "step": 11550 }, { "epoch": 0.5894049864885536, "grad_norm": 5.570619707991569, "learning_rate": 3.807738990868339e-05, "loss": 0.8789, "step": 11560 }, { "epoch": 0.5899148523938205, "grad_norm": 2.21579379442429, "learning_rate": 3.79972187806245e-05, "loss": 0.8711, "step": 11570 }, { "epoch": 0.5904247182990874, "grad_norm": 2.3273662229011083, "learning_rate": 3.791708038496655e-05, "loss": 0.9192, "step": 11580 }, { "epoch": 0.5909345842043543, "grad_norm": 3.368524115436495, "learning_rate": 3.783697494025241e-05, "loss": 0.9443, "step": 11590 }, { "epoch": 0.5914444501096212, "grad_norm": 2.9074077356463652, "learning_rate": 3.7756902664935056e-05, "loss": 0.8832, "step": 11600 }, { "epoch": 0.5919543160148881, "grad_norm": 5.565835812156636, "learning_rate": 3.7676863777377054e-05, "loss": 1.005, "step": 11610 }, { "epoch": 0.592464181920155, "grad_norm": 4.182857678708667, "learning_rate": 3.759685849584986e-05, "loss": 0.9664, "step": 11620 }, { "epoch": 0.592974047825422, "grad_norm": 1.8679666859340913, "learning_rate": 3.7516887038533336e-05, "loss": 0.8767, "step": 11630 }, { "epoch": 0.5934839137306889, "grad_norm": 2.849078248665122, "learning_rate": 3.74369496235151e-05, "loss": 0.8783, "step": 11640 }, { "epoch": 0.5939937796359558, "grad_norm": 4.6788568126262975, "learning_rate": 3.735704646878991e-05, "loss": 0.93, "step": 11650 }, { "epoch": 0.5945036455412227, "grad_norm": 3.019283126283293, "learning_rate": 3.7277177792259114e-05, "loss": 0.9045, "step": 11660 }, { "epoch": 0.5950135114464896, "grad_norm": 2.972243476835822, "learning_rate": 3.719734381173e-05, "loss": 0.9524, "step": 11670 }, { "epoch": 0.5955233773517565, "grad_norm": 2.2189264516677345, "learning_rate": 3.71175447449153e-05, "loss": 0.8851, "step": 11680 }, { "epoch": 0.5960332432570234, "grad_norm": 3.2944785067298183, "learning_rate": 3.7037780809432455e-05, "loss": 0.8766, "step": 11690 }, { "epoch": 0.5965431091622904, "grad_norm": 2.414383869960349, "learning_rate": 3.6958052222803184e-05, "loss": 0.8989, "step": 11700 }, { "epoch": 0.5970529750675573, "grad_norm": 2.8865393460989024, "learning_rate": 3.687835920245275e-05, "loss": 0.9272, "step": 11710 }, { "epoch": 0.5975628409728242, "grad_norm": 2.8104658906384246, "learning_rate": 3.679870196570943e-05, "loss": 0.8615, "step": 11720 }, { "epoch": 0.5980727068780911, "grad_norm": 2.8246694591941535, "learning_rate": 3.6719080729803964e-05, "loss": 0.9094, "step": 11730 }, { "epoch": 0.598582572783358, "grad_norm": 2.9577565894440254, "learning_rate": 3.663949571186883e-05, "loss": 0.9594, "step": 11740 }, { "epoch": 0.5990924386886249, "grad_norm": 3.187791797691879, "learning_rate": 
3.655994712893781e-05, "loss": 0.8837, "step": 11750 }, { "epoch": 0.5996023045938919, "grad_norm": 3.373158951815689, "learning_rate": 3.648043519794533e-05, "loss": 0.9613, "step": 11760 }, { "epoch": 0.6001121704991588, "grad_norm": 2.95618668476516, "learning_rate": 3.640096013572578e-05, "loss": 0.9403, "step": 11770 }, { "epoch": 0.6006220364044257, "grad_norm": 3.5673480422749635, "learning_rate": 3.6321522159013103e-05, "loss": 0.904, "step": 11780 }, { "epoch": 0.6011319023096926, "grad_norm": 3.118773856517395, "learning_rate": 3.624212148444004e-05, "loss": 0.8945, "step": 11790 }, { "epoch": 0.6016417682149595, "grad_norm": 5.216405275539851, "learning_rate": 3.616275832853765e-05, "loss": 0.9204, "step": 11800 }, { "epoch": 0.6021516341202264, "grad_norm": 2.7133261562797526, "learning_rate": 3.608343290773464e-05, "loss": 0.9189, "step": 11810 }, { "epoch": 0.6026615000254933, "grad_norm": 3.1420562657787627, "learning_rate": 3.600414543835684e-05, "loss": 0.9192, "step": 11820 }, { "epoch": 0.6031713659307603, "grad_norm": 2.0431784110850617, "learning_rate": 3.59248961366266e-05, "loss": 0.9465, "step": 11830 }, { "epoch": 0.6036812318360272, "grad_norm": 3.1976159900884564, "learning_rate": 3.5845685218662105e-05, "loss": 0.9248, "step": 11840 }, { "epoch": 0.6041910977412941, "grad_norm": 3.4779893339813315, "learning_rate": 3.576651290047696e-05, "loss": 0.8583, "step": 11850 }, { "epoch": 0.604700963646561, "grad_norm": 4.066301648674187, "learning_rate": 3.568737939797943e-05, "loss": 0.8628, "step": 11860 }, { "epoch": 0.6052108295518279, "grad_norm": 2.7683789182171723, "learning_rate": 3.5608284926972e-05, "loss": 0.907, "step": 11870 }, { "epoch": 0.6057206954570948, "grad_norm": 2.7643500070217417, "learning_rate": 3.5529229703150616e-05, "loss": 0.9485, "step": 11880 }, { "epoch": 0.6062305613623618, "grad_norm": 2.9399646436397027, "learning_rate": 3.5450213942104295e-05, "loss": 0.875, "step": 11890 }, { "epoch": 0.6067404272676287, "grad_norm": 3.4505591667823285, "learning_rate": 3.537123785931439e-05, "loss": 0.9528, "step": 11900 }, { "epoch": 0.6072502931728956, "grad_norm": 3.925384034593624, "learning_rate": 3.5292301670154027e-05, "loss": 0.9222, "step": 11910 }, { "epoch": 0.6077601590781625, "grad_norm": 3.8888063709502894, "learning_rate": 3.5213405589887606e-05, "loss": 0.9338, "step": 11920 }, { "epoch": 0.6082700249834293, "grad_norm": 2.485004855310051, "learning_rate": 3.513454983367006e-05, "loss": 0.9199, "step": 11930 }, { "epoch": 0.6087798908886962, "grad_norm": 2.9263024358343084, "learning_rate": 3.505573461654642e-05, "loss": 0.8846, "step": 11940 }, { "epoch": 0.6092897567939631, "grad_norm": 4.171333669515278, "learning_rate": 3.497696015345116e-05, "loss": 0.9739, "step": 11950 }, { "epoch": 0.60979962269923, "grad_norm": 3.2731085524136994, "learning_rate": 3.4898226659207574e-05, "loss": 0.9456, "step": 11960 }, { "epoch": 0.610309488604497, "grad_norm": 3.303438605734584, "learning_rate": 3.4819534348527285e-05, "loss": 0.9216, "step": 11970 }, { "epoch": 0.6108193545097639, "grad_norm": 5.698560138213655, "learning_rate": 3.4740883436009544e-05, "loss": 1.0142, "step": 11980 }, { "epoch": 0.6113292204150308, "grad_norm": 3.3245038246763055, "learning_rate": 3.466227413614076e-05, "loss": 0.9261, "step": 11990 }, { "epoch": 0.6118390863202977, "grad_norm": 4.040430285803717, "learning_rate": 3.458370666329384e-05, "loss": 0.9069, "step": 12000 }, { "epoch": 0.6123489522255646, "grad_norm": 3.4869766502419823, "learning_rate": 
3.450518123172762e-05, "loss": 0.8981, "step": 12010 }, { "epoch": 0.6128588181308315, "grad_norm": 4.232843588821771, "learning_rate": 3.44266980555863e-05, "loss": 0.8855, "step": 12020 }, { "epoch": 0.6133686840360985, "grad_norm": 3.433938204132207, "learning_rate": 3.434825734889884e-05, "loss": 0.8779, "step": 12030 }, { "epoch": 0.6138785499413654, "grad_norm": 2.8696393308402217, "learning_rate": 3.426985932557841e-05, "loss": 0.938, "step": 12040 }, { "epoch": 0.6143884158466323, "grad_norm": 2.968846488644237, "learning_rate": 3.41915041994217e-05, "loss": 0.8572, "step": 12050 }, { "epoch": 0.6148982817518992, "grad_norm": 5.0791179817812795, "learning_rate": 3.411319218410849e-05, "loss": 0.9212, "step": 12060 }, { "epoch": 0.6154081476571661, "grad_norm": 2.2962426975985823, "learning_rate": 3.403492349320101e-05, "loss": 0.9056, "step": 12070 }, { "epoch": 0.615918013562433, "grad_norm": 1.8383516635333248, "learning_rate": 3.395669834014326e-05, "loss": 0.9271, "step": 12080 }, { "epoch": 0.6164278794677, "grad_norm": 3.1729183959789315, "learning_rate": 3.387851693826057e-05, "loss": 0.923, "step": 12090 }, { "epoch": 0.6169377453729669, "grad_norm": 3.95371535396455, "learning_rate": 3.380037950075894e-05, "loss": 0.9044, "step": 12100 }, { "epoch": 0.6174476112782338, "grad_norm": 3.5827463721945905, "learning_rate": 3.3722286240724476e-05, "loss": 0.9299, "step": 12110 }, { "epoch": 0.6179574771835007, "grad_norm": 3.129245853042041, "learning_rate": 3.3644237371122804e-05, "loss": 0.9198, "step": 12120 }, { "epoch": 0.6184673430887676, "grad_norm": 3.5168693863681932, "learning_rate": 3.3566233104798515e-05, "loss": 0.911, "step": 12130 }, { "epoch": 0.6189772089940345, "grad_norm": 2.1807090890629452, "learning_rate": 3.3488273654474564e-05, "loss": 0.9046, "step": 12140 }, { "epoch": 0.6194870748993014, "grad_norm": 2.5882476557124443, "learning_rate": 3.341035923275164e-05, "loss": 0.8614, "step": 12150 }, { "epoch": 0.6199969408045684, "grad_norm": 3.1771327820708453, "learning_rate": 3.3332490052107704e-05, "loss": 0.9449, "step": 12160 }, { "epoch": 0.6205068067098353, "grad_norm": 3.6009214787575883, "learning_rate": 3.3254666324897274e-05, "loss": 0.8916, "step": 12170 }, { "epoch": 0.6210166726151022, "grad_norm": 5.936868523309456, "learning_rate": 3.317688826335097e-05, "loss": 0.8937, "step": 12180 }, { "epoch": 0.6215265385203691, "grad_norm": 3.5048551406230986, "learning_rate": 3.309915607957487e-05, "loss": 0.9668, "step": 12190 }, { "epoch": 0.622036404425636, "grad_norm": 3.310640103119443, "learning_rate": 3.30214699855499e-05, "loss": 0.9018, "step": 12200 }, { "epoch": 0.6225462703309029, "grad_norm": 2.9346870989631726, "learning_rate": 3.294383019313134e-05, "loss": 0.8914, "step": 12210 }, { "epoch": 0.6230561362361698, "grad_norm": 2.558908426943729, "learning_rate": 3.286623691404819e-05, "loss": 0.9335, "step": 12220 }, { "epoch": 0.6235660021414368, "grad_norm": 4.90660193471452, "learning_rate": 3.2788690359902584e-05, "loss": 0.8607, "step": 12230 }, { "epoch": 0.6240758680467037, "grad_norm": 2.7439371046004783, "learning_rate": 3.271119074216925e-05, "loss": 0.891, "step": 12240 }, { "epoch": 0.6245857339519706, "grad_norm": 3.291852770741101, "learning_rate": 3.263373827219493e-05, "loss": 0.9197, "step": 12250 }, { "epoch": 0.6250955998572375, "grad_norm": 2.634482936247294, "learning_rate": 3.255633316119779e-05, "loss": 0.861, "step": 12260 }, { "epoch": 0.6256054657625044, "grad_norm": 2.409125778859325, "learning_rate": 
3.247897562026681e-05, "loss": 0.9235, "step": 12270 }, { "epoch": 0.6261153316677713, "grad_norm": 3.1405082718239363, "learning_rate": 3.2401665860361285e-05, "loss": 0.8404, "step": 12280 }, { "epoch": 0.6266251975730383, "grad_norm": 2.6081088882613894, "learning_rate": 3.232440409231017e-05, "loss": 0.9134, "step": 12290 }, { "epoch": 0.6271350634783052, "grad_norm": 2.2025857873300305, "learning_rate": 3.224719052681156e-05, "loss": 0.919, "step": 12300 }, { "epoch": 0.6276449293835721, "grad_norm": 3.4369456543891554, "learning_rate": 3.2170025374432155e-05, "loss": 0.8261, "step": 12310 }, { "epoch": 0.628154795288839, "grad_norm": 3.8057646280451554, "learning_rate": 3.2092908845606504e-05, "loss": 0.966, "step": 12320 }, { "epoch": 0.6286646611941059, "grad_norm": 2.911014268964996, "learning_rate": 3.201584115063667e-05, "loss": 0.9553, "step": 12330 }, { "epoch": 0.6291745270993728, "grad_norm": 6.233174755628133, "learning_rate": 3.1938822499691475e-05, "loss": 0.8982, "step": 12340 }, { "epoch": 0.6296843930046397, "grad_norm": 2.7744298669701752, "learning_rate": 3.186185310280605e-05, "loss": 0.8442, "step": 12350 }, { "epoch": 0.6301942589099067, "grad_norm": 4.394364057091598, "learning_rate": 3.178493316988115e-05, "loss": 0.9123, "step": 12360 }, { "epoch": 0.6307041248151736, "grad_norm": 2.505549874929729, "learning_rate": 3.170806291068266e-05, "loss": 0.8654, "step": 12370 }, { "epoch": 0.6312139907204405, "grad_norm": 3.1903516974055703, "learning_rate": 3.163124253484105e-05, "loss": 0.9217, "step": 12380 }, { "epoch": 0.6317238566257074, "grad_norm": 4.443949170985445, "learning_rate": 3.1554472251850665e-05, "loss": 0.8163, "step": 12390 }, { "epoch": 0.6322337225309743, "grad_norm": 3.867217892734069, "learning_rate": 3.1477752271069314e-05, "loss": 0.9288, "step": 12400 }, { "epoch": 0.6327435884362412, "grad_norm": 3.6752994660338603, "learning_rate": 3.140108280171758e-05, "loss": 0.9328, "step": 12410 }, { "epoch": 0.6332534543415081, "grad_norm": 3.8749228417087136, "learning_rate": 3.1324464052878345e-05, "loss": 0.8701, "step": 12420 }, { "epoch": 0.6337633202467751, "grad_norm": 3.0585136968349627, "learning_rate": 3.124789623349615e-05, "loss": 0.8484, "step": 12430 }, { "epoch": 0.634273186152042, "grad_norm": 4.2716899334687, "learning_rate": 3.117137955237664e-05, "loss": 0.9894, "step": 12440 }, { "epoch": 0.6347830520573089, "grad_norm": 2.6764020080604483, "learning_rate": 3.109491421818601e-05, "loss": 0.8136, "step": 12450 }, { "epoch": 0.6352929179625758, "grad_norm": 3.1016263312935433, "learning_rate": 3.101850043945042e-05, "loss": 0.8488, "step": 12460 }, { "epoch": 0.6358027838678427, "grad_norm": 2.4679718336374488, "learning_rate": 3.094213842455546e-05, "loss": 0.8484, "step": 12470 }, { "epoch": 0.6363126497731096, "grad_norm": 5.22514561892103, "learning_rate": 3.086582838174551e-05, "loss": 0.8965, "step": 12480 }, { "epoch": 0.6368225156783766, "grad_norm": 3.4772275931848475, "learning_rate": 3.0789570519123277e-05, "loss": 0.8976, "step": 12490 }, { "epoch": 0.6373323815836435, "grad_norm": 4.794405574374894, "learning_rate": 3.0713365044649154e-05, "loss": 0.8751, "step": 12500 }, { "epoch": 0.6378422474889104, "grad_norm": 4.148149373525859, "learning_rate": 3.063721216614061e-05, "loss": 0.9763, "step": 12510 }, { "epoch": 0.6383521133941773, "grad_norm": 4.087205183221575, "learning_rate": 3.056111209127176e-05, "loss": 0.9247, "step": 12520 }, { "epoch": 0.6388619792994442, "grad_norm": 2.360906763307815, "learning_rate": 
3.048506502757267e-05, "loss": 0.8688, "step": 12530 }, { "epoch": 0.6393718452047111, "grad_norm": 4.244869787065028, "learning_rate": 3.0409071182428854e-05, "loss": 0.9515, "step": 12540 }, { "epoch": 0.639881711109978, "grad_norm": 3.4986343512590246, "learning_rate": 3.0333130763080698e-05, "loss": 0.9537, "step": 12550 }, { "epoch": 0.640391577015245, "grad_norm": 3.519237012987473, "learning_rate": 3.0257243976622896e-05, "loss": 0.8557, "step": 12560 }, { "epoch": 0.6409014429205119, "grad_norm": 2.7358645537943693, "learning_rate": 3.0181411030003898e-05, "loss": 0.9759, "step": 12570 }, { "epoch": 0.6414113088257788, "grad_norm": 4.385992405742961, "learning_rate": 3.010563213002527e-05, "loss": 0.893, "step": 12580 }, { "epoch": 0.6419211747310457, "grad_norm": 3.3110790101503955, "learning_rate": 3.0029907483341256e-05, "loss": 0.9642, "step": 12590 }, { "epoch": 0.6424310406363126, "grad_norm": 3.0589397674175807, "learning_rate": 2.99542372964581e-05, "loss": 0.8845, "step": 12600 }, { "epoch": 0.6429409065415795, "grad_norm": 2.822803276107203, "learning_rate": 2.9878621775733552e-05, "loss": 0.9041, "step": 12610 }, { "epoch": 0.6434507724468465, "grad_norm": 2.4733742296815584, "learning_rate": 2.980306112737631e-05, "loss": 0.8669, "step": 12620 }, { "epoch": 0.6439606383521134, "grad_norm": 7.301140467631872, "learning_rate": 2.9727555557445363e-05, "loss": 0.9312, "step": 12630 }, { "epoch": 0.6444705042573803, "grad_norm": 2.392548659682165, "learning_rate": 2.9652105271849563e-05, "loss": 0.854, "step": 12640 }, { "epoch": 0.6449803701626472, "grad_norm": 2.7533656924129315, "learning_rate": 2.9576710476346947e-05, "loss": 0.8517, "step": 12650 }, { "epoch": 0.6454902360679141, "grad_norm": 4.327769590092878, "learning_rate": 2.9501371376544284e-05, "loss": 0.8938, "step": 12660 }, { "epoch": 0.646000101973181, "grad_norm": 3.476758331876157, "learning_rate": 2.9426088177896394e-05, "loss": 0.8683, "step": 12670 }, { "epoch": 0.6465099678784479, "grad_norm": 2.308821090636214, "learning_rate": 2.9350861085705694e-05, "loss": 0.9404, "step": 12680 }, { "epoch": 0.6470198337837149, "grad_norm": 4.226892987412482, "learning_rate": 2.9275690305121607e-05, "loss": 0.8856, "step": 12690 }, { "epoch": 0.6475296996889818, "grad_norm": 3.7325296122621903, "learning_rate": 2.9200576041139917e-05, "loss": 0.9357, "step": 12700 }, { "epoch": 0.6480395655942487, "grad_norm": 5.412846699260613, "learning_rate": 2.9125518498602367e-05, "loss": 0.9863, "step": 12710 }, { "epoch": 0.6485494314995156, "grad_norm": 3.713960826368952, "learning_rate": 2.905051788219597e-05, "loss": 0.9176, "step": 12720 }, { "epoch": 0.6490592974047825, "grad_norm": 3.1083005638029273, "learning_rate": 2.8975574396452487e-05, "loss": 0.9053, "step": 12730 }, { "epoch": 0.6495691633100494, "grad_norm": 2.4663160862442437, "learning_rate": 2.890068824574797e-05, "loss": 0.8443, "step": 12740 }, { "epoch": 0.6500790292153164, "grad_norm": 3.1856470594057327, "learning_rate": 2.882585963430196e-05, "loss": 0.9438, "step": 12750 }, { "epoch": 0.6505888951205833, "grad_norm": 3.733059533076023, "learning_rate": 2.875108876617726e-05, "loss": 0.8556, "step": 12760 }, { "epoch": 0.6510987610258502, "grad_norm": 2.795370045295235, "learning_rate": 2.8676375845279013e-05, "loss": 0.9295, "step": 12770 }, { "epoch": 0.6516086269311171, "grad_norm": 4.114564052755407, "learning_rate": 2.860172107535452e-05, "loss": 0.8926, "step": 12780 }, { "epoch": 0.652118492836384, "grad_norm": 3.8342475999978105, 
"learning_rate": 2.8527124659992388e-05, "loss": 0.9178, "step": 12790 }, { "epoch": 0.6526283587416509, "grad_norm": 3.804108407067104, "learning_rate": 2.8452586802622116e-05, "loss": 0.9454, "step": 12800 }, { "epoch": 0.6531382246469178, "grad_norm": 3.495862283582211, "learning_rate": 2.8378107706513513e-05, "loss": 0.8843, "step": 12810 }, { "epoch": 0.6536480905521848, "grad_norm": 3.1352881634440863, "learning_rate": 2.8303687574776123e-05, "loss": 0.9216, "step": 12820 }, { "epoch": 0.6541579564574517, "grad_norm": 2.897073613404968, "learning_rate": 2.8229326610358764e-05, "loss": 0.8859, "step": 12830 }, { "epoch": 0.6546678223627186, "grad_norm": 2.2408615925152193, "learning_rate": 2.8155025016048763e-05, "loss": 0.8343, "step": 12840 }, { "epoch": 0.6551776882679855, "grad_norm": 3.0453181085219496, "learning_rate": 2.8080782994471688e-05, "loss": 0.91, "step": 12850 }, { "epoch": 0.6556875541732524, "grad_norm": 2.208086711932004, "learning_rate": 2.800660074809057e-05, "loss": 0.8767, "step": 12860 }, { "epoch": 0.6561974200785193, "grad_norm": 2.5486889873141783, "learning_rate": 2.793247847920542e-05, "loss": 0.8911, "step": 12870 }, { "epoch": 0.6567072859837862, "grad_norm": 3.106151628703104, "learning_rate": 2.7858416389952723e-05, "loss": 0.9332, "step": 12880 }, { "epoch": 0.6572171518890532, "grad_norm": 3.2586925146836605, "learning_rate": 2.7784414682304832e-05, "loss": 0.9068, "step": 12890 }, { "epoch": 0.6577270177943201, "grad_norm": 3.6442338592489962, "learning_rate": 2.7710473558069437e-05, "loss": 0.8552, "step": 12900 }, { "epoch": 0.658236883699587, "grad_norm": 3.228672847008334, "learning_rate": 2.7636593218889e-05, "loss": 0.9081, "step": 12910 }, { "epoch": 0.6587467496048539, "grad_norm": 2.6403972328638683, "learning_rate": 2.7562773866240266e-05, "loss": 0.841, "step": 12920 }, { "epoch": 0.6592566155101208, "grad_norm": 3.8596616671540764, "learning_rate": 2.7489015701433606e-05, "loss": 0.8803, "step": 12930 }, { "epoch": 0.6597664814153877, "grad_norm": 2.0114828091931423, "learning_rate": 2.7415318925612564e-05, "loss": 0.8742, "step": 12940 }, { "epoch": 0.6602763473206547, "grad_norm": 2.6827974581752234, "learning_rate": 2.734168373975326e-05, "loss": 0.9224, "step": 12950 }, { "epoch": 0.6607862132259216, "grad_norm": 3.196678441619736, "learning_rate": 2.726811034466385e-05, "loss": 0.8562, "step": 12960 }, { "epoch": 0.6612960791311885, "grad_norm": 3.9594953344440054, "learning_rate": 2.719459894098397e-05, "loss": 0.9606, "step": 12970 }, { "epoch": 0.6618059450364554, "grad_norm": 3.237167790637889, "learning_rate": 2.7121149729184282e-05, "loss": 0.9294, "step": 12980 }, { "epoch": 0.6623158109417223, "grad_norm": 4.424279255609567, "learning_rate": 2.7047762909565698e-05, "loss": 0.9269, "step": 12990 }, { "epoch": 0.6628256768469892, "grad_norm": 4.143581847554707, "learning_rate": 2.6974438682259128e-05, "loss": 0.8609, "step": 13000 }, { "epoch": 0.6633355427522561, "grad_norm": 5.414971692654772, "learning_rate": 2.6901177247224705e-05, "loss": 0.9294, "step": 13010 }, { "epoch": 0.6638454086575231, "grad_norm": 2.81261394366698, "learning_rate": 2.6827978804251364e-05, "loss": 0.9249, "step": 13020 }, { "epoch": 0.66435527456279, "grad_norm": 2.4113320950008914, "learning_rate": 2.675484355295622e-05, "loss": 0.8785, "step": 13030 }, { "epoch": 0.6648651404680569, "grad_norm": 3.800653939142313, "learning_rate": 2.6681771692784063e-05, "loss": 0.9998, "step": 13040 }, { "epoch": 0.6653750063733238, "grad_norm": 
3.0109208441371655, "learning_rate": 2.660876342300689e-05, "loss": 0.8705, "step": 13050 }, { "epoch": 0.6658848722785907, "grad_norm": 3.1447709022295225, "learning_rate": 2.6535818942723144e-05, "loss": 0.9503, "step": 13060 }, { "epoch": 0.6663947381838576, "grad_norm": 3.9762335762036636, "learning_rate": 2.6462938450857448e-05, "loss": 0.9115, "step": 13070 }, { "epoch": 0.6669046040891246, "grad_norm": 3.2259963956535977, "learning_rate": 2.63901221461598e-05, "loss": 0.9623, "step": 13080 }, { "epoch": 0.6674144699943915, "grad_norm": 4.073023361887602, "learning_rate": 2.6317370227205273e-05, "loss": 0.86, "step": 13090 }, { "epoch": 0.6679243358996584, "grad_norm": 3.4641317188533507, "learning_rate": 2.624468289239327e-05, "loss": 0.9318, "step": 13100 }, { "epoch": 0.6684342018049253, "grad_norm": 4.418183729852952, "learning_rate": 2.6172060339947102e-05, "loss": 0.8924, "step": 13110 }, { "epoch": 0.6689440677101922, "grad_norm": 6.2991446822572215, "learning_rate": 2.6099502767913414e-05, "loss": 0.9038, "step": 13120 }, { "epoch": 0.6694539336154591, "grad_norm": 3.1092027982406583, "learning_rate": 2.602701037416162e-05, "loss": 0.8888, "step": 13130 }, { "epoch": 0.669963799520726, "grad_norm": 2.368881359518373, "learning_rate": 2.5954583356383465e-05, "loss": 0.874, "step": 13140 }, { "epoch": 0.670473665425993, "grad_norm": 2.683827925138308, "learning_rate": 2.588222191209228e-05, "loss": 0.8619, "step": 13150 }, { "epoch": 0.6709835313312599, "grad_norm": 3.8128522107134137, "learning_rate": 2.58099262386227e-05, "loss": 0.9038, "step": 13160 }, { "epoch": 0.6714933972365268, "grad_norm": 3.239193069409403, "learning_rate": 2.5737696533129928e-05, "loss": 0.8674, "step": 13170 }, { "epoch": 0.6720032631417937, "grad_norm": 3.0916442749025648, "learning_rate": 2.5665532992589292e-05, "loss": 0.881, "step": 13180 }, { "epoch": 0.6725131290470606, "grad_norm": 4.545306586179735, "learning_rate": 2.5593435813795663e-05, "loss": 0.858, "step": 13190 }, { "epoch": 0.6730229949523275, "grad_norm": 4.702378655621528, "learning_rate": 2.5521405193362958e-05, "loss": 0.9547, "step": 13200 }, { "epoch": 0.6735328608575945, "grad_norm": 2.1072438976013212, "learning_rate": 2.5449441327723582e-05, "loss": 0.8596, "step": 13210 }, { "epoch": 0.6740427267628614, "grad_norm": 4.903565065684834, "learning_rate": 2.5377544413127875e-05, "loss": 0.9372, "step": 13220 }, { "epoch": 0.6745525926681283, "grad_norm": 3.3927449973757784, "learning_rate": 2.530571464564364e-05, "loss": 0.8379, "step": 13230 }, { "epoch": 0.6750624585733952, "grad_norm": 2.5659211244065414, "learning_rate": 2.5233952221155533e-05, "loss": 0.921, "step": 13240 }, { "epoch": 0.6755723244786621, "grad_norm": 4.793827349345645, "learning_rate": 2.516225733536457e-05, "loss": 0.9047, "step": 13250 }, { "epoch": 0.676082190383929, "grad_norm": 2.537834221579057, "learning_rate": 2.5090630183787568e-05, "loss": 0.8801, "step": 13260 }, { "epoch": 0.6765920562891959, "grad_norm": 3.3959247877590575, "learning_rate": 2.5019070961756645e-05, "loss": 0.8718, "step": 13270 }, { "epoch": 0.6771019221944629, "grad_norm": 3.7648064061843933, "learning_rate": 2.4947579864418662e-05, "loss": 0.885, "step": 13280 }, { "epoch": 0.6776117880997298, "grad_norm": 5.504751414259825, "learning_rate": 2.4876157086734754e-05, "loss": 0.9406, "step": 13290 }, { "epoch": 0.6781216540049967, "grad_norm": 2.838604719878593, "learning_rate": 2.4804802823479613e-05, "loss": 0.8941, "step": 13300 }, { "epoch": 0.6786315199102636, 
"grad_norm": 3.2520766775921612, "learning_rate": 2.473351726924123e-05, "loss": 0.9337, "step": 13310 }, { "epoch": 0.6791413858155305, "grad_norm": 2.745153748830726, "learning_rate": 2.4662300618420147e-05, "loss": 0.9217, "step": 13320 }, { "epoch": 0.6796512517207974, "grad_norm": 3.9717715373219784, "learning_rate": 2.4591153065229012e-05, "loss": 0.8655, "step": 13330 }, { "epoch": 0.6801611176260643, "grad_norm": 2.5987326224708784, "learning_rate": 2.4520074803692038e-05, "loss": 0.8738, "step": 13340 }, { "epoch": 0.6806709835313313, "grad_norm": 3.2422869386474638, "learning_rate": 2.4449066027644475e-05, "loss": 0.9122, "step": 13350 }, { "epoch": 0.6811808494365982, "grad_norm": 3.2192037375557496, "learning_rate": 2.4378126930732126e-05, "loss": 1.0063, "step": 13360 }, { "epoch": 0.6816907153418651, "grad_norm": 2.9091472226416477, "learning_rate": 2.4307257706410668e-05, "loss": 0.8559, "step": 13370 }, { "epoch": 0.682200581247132, "grad_norm": 2.732330251551064, "learning_rate": 2.423645854794537e-05, "loss": 0.9152, "step": 13380 }, { "epoch": 0.6827104471523989, "grad_norm": 3.9747726088439856, "learning_rate": 2.4165729648410274e-05, "loss": 0.8655, "step": 13390 }, { "epoch": 0.6832203130576658, "grad_norm": 3.9107079639719657, "learning_rate": 2.409507120068797e-05, "loss": 0.8769, "step": 13400 }, { "epoch": 0.6837301789629328, "grad_norm": 2.102310125706703, "learning_rate": 2.4024483397468818e-05, "loss": 0.9235, "step": 13410 }, { "epoch": 0.6842400448681997, "grad_norm": 4.644569446012239, "learning_rate": 2.3953966431250567e-05, "loss": 0.8647, "step": 13420 }, { "epoch": 0.6847499107734666, "grad_norm": 2.7607103784968134, "learning_rate": 2.3883520494337787e-05, "loss": 0.8993, "step": 13430 }, { "epoch": 0.6852597766787335, "grad_norm": 2.0129941683167143, "learning_rate": 2.381314577884131e-05, "loss": 0.8345, "step": 13440 }, { "epoch": 0.6857696425840004, "grad_norm": 3.6299635841779643, "learning_rate": 2.3742842476677834e-05, "loss": 0.9076, "step": 13450 }, { "epoch": 0.6862795084892673, "grad_norm": 3.340608366170006, "learning_rate": 2.3672610779569177e-05, "loss": 0.9382, "step": 13460 }, { "epoch": 0.6867893743945342, "grad_norm": 2.6640065743686927, "learning_rate": 2.3602450879042003e-05, "loss": 0.8252, "step": 13470 }, { "epoch": 0.6872992402998012, "grad_norm": 3.033855625931912, "learning_rate": 2.3532362966427118e-05, "loss": 0.8373, "step": 13480 }, { "epoch": 0.6878091062050681, "grad_norm": 2.2459827213653343, "learning_rate": 2.3462347232859026e-05, "loss": 0.8251, "step": 13490 }, { "epoch": 0.688318972110335, "grad_norm": 3.6492834701278656, "learning_rate": 2.3392403869275404e-05, "loss": 0.8608, "step": 13500 }, { "epoch": 0.6888288380156019, "grad_norm": 3.6390410751753874, "learning_rate": 2.3322533066416547e-05, "loss": 0.8952, "step": 13510 }, { "epoch": 0.6893387039208688, "grad_norm": 2.3085856898721326, "learning_rate": 2.3252735014824883e-05, "loss": 0.8678, "step": 13520 }, { "epoch": 0.6898485698261357, "grad_norm": 4.082707964001825, "learning_rate": 2.3183009904844484e-05, "loss": 0.8718, "step": 13530 }, { "epoch": 0.6903584357314027, "grad_norm": 3.821054464677834, "learning_rate": 2.3113357926620445e-05, "loss": 0.9594, "step": 13540 }, { "epoch": 0.6908683016366696, "grad_norm": 3.001023761538541, "learning_rate": 2.304377927009844e-05, "loss": 0.8978, "step": 13550 }, { "epoch": 0.6913781675419365, "grad_norm": 2.8298450839958313, "learning_rate": 2.297427412502422e-05, "loss": 0.869, "step": 13560 }, { "epoch": 
0.6918880334472034, "grad_norm": 2.836530215019172, "learning_rate": 2.2904842680943027e-05, "loss": 0.9088, "step": 13570 }, { "epoch": 0.6923978993524703, "grad_norm": 4.180377223929396, "learning_rate": 2.2835485127199135e-05, "loss": 0.9533, "step": 13580 }, { "epoch": 0.6929077652577372, "grad_norm": 4.670221434880893, "learning_rate": 2.2766201652935305e-05, "loss": 0.9291, "step": 13590 }, { "epoch": 0.6934176311630041, "grad_norm": 3.516604042302902, "learning_rate": 2.2696992447092326e-05, "loss": 0.8877, "step": 13600 }, { "epoch": 0.6939274970682711, "grad_norm": 2.4900284887748985, "learning_rate": 2.2627857698408338e-05, "loss": 0.8219, "step": 13610 }, { "epoch": 0.694437362973538, "grad_norm": 4.913835928490447, "learning_rate": 2.2558797595418585e-05, "loss": 0.9116, "step": 13620 }, { "epoch": 0.6949472288788049, "grad_norm": 2.9055182560506125, "learning_rate": 2.248981232645459e-05, "loss": 0.9088, "step": 13630 }, { "epoch": 0.6954570947840718, "grad_norm": 3.3527635882219475, "learning_rate": 2.242090207964393e-05, "loss": 0.8809, "step": 13640 }, { "epoch": 0.6959669606893387, "grad_norm": 3.2573139563374136, "learning_rate": 2.2352067042909512e-05, "loss": 0.9004, "step": 13650 }, { "epoch": 0.6964768265946056, "grad_norm": 2.304191763039682, "learning_rate": 2.2283307403969155e-05, "loss": 0.8939, "step": 13660 }, { "epoch": 0.6969866924998726, "grad_norm": 4.007114973416113, "learning_rate": 2.2214623350335116e-05, "loss": 0.9064, "step": 13670 }, { "epoch": 0.6974965584051395, "grad_norm": 3.6062362685383524, "learning_rate": 2.2146015069313426e-05, "loss": 0.8528, "step": 13680 }, { "epoch": 0.6980064243104064, "grad_norm": 5.213944919568216, "learning_rate": 2.2077482748003585e-05, "loss": 0.9015, "step": 13690 }, { "epoch": 0.6985162902156733, "grad_norm": 4.001983770515366, "learning_rate": 2.200902657329784e-05, "loss": 0.862, "step": 13700 }, { "epoch": 0.6990261561209402, "grad_norm": 2.5866784871880375, "learning_rate": 2.194064673188089e-05, "loss": 0.8496, "step": 13710 }, { "epoch": 0.6995360220262071, "grad_norm": 3.292858427289412, "learning_rate": 2.1872343410229178e-05, "loss": 0.8766, "step": 13720 }, { "epoch": 0.700045887931474, "grad_norm": 2.7369948622677964, "learning_rate": 2.1804116794610536e-05, "loss": 0.9164, "step": 13730 }, { "epoch": 0.700555753836741, "grad_norm": 3.266312647688562, "learning_rate": 2.1735967071083567e-05, "loss": 0.9414, "step": 13740 }, { "epoch": 0.7010656197420079, "grad_norm": 4.511476098623222, "learning_rate": 2.1667894425497192e-05, "loss": 0.9279, "step": 13750 }, { "epoch": 0.7015754856472748, "grad_norm": 4.531016473727997, "learning_rate": 2.1599899043490195e-05, "loss": 0.9571, "step": 13760 }, { "epoch": 0.7020853515525417, "grad_norm": 2.912246011878129, "learning_rate": 2.153198111049055e-05, "loss": 0.8981, "step": 13770 }, { "epoch": 0.7025952174578086, "grad_norm": 2.692086122196558, "learning_rate": 2.1464140811715122e-05, "loss": 0.897, "step": 13780 }, { "epoch": 0.7031050833630755, "grad_norm": 3.3648953304431592, "learning_rate": 2.1396378332169002e-05, "loss": 0.8253, "step": 13790 }, { "epoch": 0.7036149492683424, "grad_norm": 2.734002922156205, "learning_rate": 2.1328693856645093e-05, "loss": 0.8855, "step": 13800 }, { "epoch": 0.7041248151736094, "grad_norm": 3.2681411269094034, "learning_rate": 2.126108756972356e-05, "loss": 0.9766, "step": 13810 }, { "epoch": 0.7046346810788763, "grad_norm": 2.2947043895088592, "learning_rate": 2.119355965577134e-05, "loss": 0.8715, "step": 13820 }, { 
"epoch": 0.7051445469841432, "grad_norm": 3.519515502644016, "learning_rate": 2.1126110298941635e-05, "loss": 0.8414, "step": 13830 }, { "epoch": 0.7056544128894101, "grad_norm": 2.5251922047442816, "learning_rate": 2.1058739683173484e-05, "loss": 0.8371, "step": 13840 }, { "epoch": 0.706164278794677, "grad_norm": 3.18827897183748, "learning_rate": 2.0991447992191066e-05, "loss": 0.8544, "step": 13850 }, { "epoch": 0.7066741446999439, "grad_norm": 6.902658123270844, "learning_rate": 2.0924235409503456e-05, "loss": 0.8439, "step": 13860 }, { "epoch": 0.7071840106052109, "grad_norm": 4.265874028316936, "learning_rate": 2.0857102118403926e-05, "loss": 0.8546, "step": 13870 }, { "epoch": 0.7076938765104778, "grad_norm": 2.743027794826126, "learning_rate": 2.0790048301969516e-05, "loss": 0.9121, "step": 13880 }, { "epoch": 0.7082037424157447, "grad_norm": 4.956407628795556, "learning_rate": 2.0723074143060555e-05, "loss": 0.9309, "step": 13890 }, { "epoch": 0.7087136083210116, "grad_norm": 6.2790601741914625, "learning_rate": 2.065617982432011e-05, "loss": 0.9223, "step": 13900 }, { "epoch": 0.7092234742262785, "grad_norm": 3.3010818174838623, "learning_rate": 2.058936552817359e-05, "loss": 0.8884, "step": 13910 }, { "epoch": 0.7097333401315454, "grad_norm": 2.638150017954024, "learning_rate": 2.0522631436828054e-05, "loss": 0.8843, "step": 13920 }, { "epoch": 0.7102432060368123, "grad_norm": 3.703275922208582, "learning_rate": 2.0455977732271993e-05, "loss": 0.884, "step": 13930 }, { "epoch": 0.7107530719420793, "grad_norm": 4.8064675843188125, "learning_rate": 2.0389404596274504e-05, "loss": 0.8608, "step": 13940 }, { "epoch": 0.7112629378473462, "grad_norm": 3.1860775078031036, "learning_rate": 2.032291221038512e-05, "loss": 0.9085, "step": 13950 }, { "epoch": 0.7117728037526131, "grad_norm": 2.776924095859808, "learning_rate": 2.025650075593309e-05, "loss": 0.8779, "step": 13960 }, { "epoch": 0.71228266965788, "grad_norm": 2.7945055013783913, "learning_rate": 2.0190170414026936e-05, "loss": 0.8974, "step": 13970 }, { "epoch": 0.7127925355631469, "grad_norm": 3.158074061515128, "learning_rate": 2.0123921365554076e-05, "loss": 0.8585, "step": 13980 }, { "epoch": 0.7133024014684138, "grad_norm": 2.979085824512164, "learning_rate": 2.00577537911801e-05, "loss": 0.8869, "step": 13990 }, { "epoch": 0.7138122673736808, "grad_norm": 3.1160926759671703, "learning_rate": 1.9991667871348556e-05, "loss": 0.8311, "step": 14000 }, { "epoch": 0.7143221332789477, "grad_norm": 3.1536851693241785, "learning_rate": 1.992566378628017e-05, "loss": 0.8893, "step": 14010 }, { "epoch": 0.7148319991842146, "grad_norm": 4.285537613384988, "learning_rate": 1.9859741715972625e-05, "loss": 0.8785, "step": 14020 }, { "epoch": 0.7153418650894815, "grad_norm": 2.9064731109977875, "learning_rate": 1.9793901840199875e-05, "loss": 0.8297, "step": 14030 }, { "epoch": 0.7158517309947484, "grad_norm": 2.5062565115060615, "learning_rate": 1.972814433851174e-05, "loss": 0.8612, "step": 14040 }, { "epoch": 0.7163615969000153, "grad_norm": 3.7803527969691237, "learning_rate": 1.96624693902334e-05, "loss": 0.8716, "step": 14050 }, { "epoch": 0.7168714628052822, "grad_norm": 2.7454729847134347, "learning_rate": 1.959687717446491e-05, "loss": 0.82, "step": 14060 }, { "epoch": 0.7173813287105492, "grad_norm": 3.93862340335549, "learning_rate": 1.9531367870080687e-05, "loss": 0.8605, "step": 14070 }, { "epoch": 0.7178911946158161, "grad_norm": 2.292545994488362, "learning_rate": 1.9465941655729087e-05, "loss": 0.8961, "step": 14080 }, { 
"epoch": 0.718401060521083, "grad_norm": 2.934203360637517, "learning_rate": 1.940059870983184e-05, "loss": 0.8075, "step": 14090 }, { "epoch": 0.7189109264263499, "grad_norm": 4.125718921369046, "learning_rate": 1.9335339210583603e-05, "loss": 0.9167, "step": 14100 }, { "epoch": 0.7194207923316168, "grad_norm": 3.872383869661933, "learning_rate": 1.9270163335951467e-05, "loss": 0.8645, "step": 14110 }, { "epoch": 0.7199306582368837, "grad_norm": 2.349309012944774, "learning_rate": 1.920507126367448e-05, "loss": 0.919, "step": 14120 }, { "epoch": 0.7204405241421507, "grad_norm": 2.7003543700050936, "learning_rate": 1.9140063171263145e-05, "loss": 0.8824, "step": 14130 }, { "epoch": 0.7209503900474176, "grad_norm": 2.7056845960872304, "learning_rate": 1.907513923599894e-05, "loss": 0.8967, "step": 14140 }, { "epoch": 0.7214602559526845, "grad_norm": 3.9644399914535824, "learning_rate": 1.9010299634933902e-05, "loss": 0.8854, "step": 14150 }, { "epoch": 0.7219701218579514, "grad_norm": 2.7756561472506913, "learning_rate": 1.8945544544889977e-05, "loss": 0.8218, "step": 14160 }, { "epoch": 0.7224799877632183, "grad_norm": 5.018500044878095, "learning_rate": 1.888087414245873e-05, "loss": 0.8744, "step": 14170 }, { "epoch": 0.7229898536684852, "grad_norm": 2.296518150657654, "learning_rate": 1.8816288604000755e-05, "loss": 0.8981, "step": 14180 }, { "epoch": 0.7234997195737521, "grad_norm": 2.5659517189639183, "learning_rate": 1.8751788105645174e-05, "loss": 0.9454, "step": 14190 }, { "epoch": 0.7240095854790191, "grad_norm": 3.8505739486429826, "learning_rate": 1.8687372823289296e-05, "loss": 0.868, "step": 14200 }, { "epoch": 0.724519451384286, "grad_norm": 3.8879727455834847, "learning_rate": 1.86230429325979e-05, "loss": 0.8202, "step": 14210 }, { "epoch": 0.7250293172895529, "grad_norm": 2.7035004153604034, "learning_rate": 1.8558798609003052e-05, "loss": 0.9175, "step": 14220 }, { "epoch": 0.7255391831948198, "grad_norm": 3.0272382276993306, "learning_rate": 1.8494640027703324e-05, "loss": 0.8689, "step": 14230 }, { "epoch": 0.7260490491000867, "grad_norm": 3.3846551926860147, "learning_rate": 1.8430567363663597e-05, "loss": 0.846, "step": 14240 }, { "epoch": 0.7265589150053536, "grad_norm": 3.2673408549133502, "learning_rate": 1.8366580791614312e-05, "loss": 0.911, "step": 14250 }, { "epoch": 0.7270687809106205, "grad_norm": 3.2139816945562423, "learning_rate": 1.8302680486051265e-05, "loss": 0.9208, "step": 14260 }, { "epoch": 0.7275786468158875, "grad_norm": 5.368949554679828, "learning_rate": 1.8238866621234912e-05, "loss": 0.8395, "step": 14270 }, { "epoch": 0.7280885127211544, "grad_norm": 3.438148967967155, "learning_rate": 1.817513937118999e-05, "loss": 0.8703, "step": 14280 }, { "epoch": 0.7285983786264213, "grad_norm": 2.5803615764853287, "learning_rate": 1.8111498909705093e-05, "loss": 0.8614, "step": 14290 }, { "epoch": 0.7291082445316882, "grad_norm": 4.9536047487293935, "learning_rate": 1.804794541033203e-05, "loss": 0.8683, "step": 14300 }, { "epoch": 0.7296181104369551, "grad_norm": 2.527176490475443, "learning_rate": 1.7984479046385557e-05, "loss": 0.9015, "step": 14310 }, { "epoch": 0.730127976342222, "grad_norm": 2.304452984322168, "learning_rate": 1.792109999094275e-05, "loss": 0.8531, "step": 14320 }, { "epoch": 0.730637842247489, "grad_norm": 3.681104743558922, "learning_rate": 1.7857808416842615e-05, "loss": 0.8987, "step": 14330 }, { "epoch": 0.7311477081527559, "grad_norm": 4.383437339124014, "learning_rate": 1.7794604496685573e-05, "loss": 0.8218, "step": 14340 
}, { "epoch": 0.7316575740580228, "grad_norm": 3.2501961003202005, "learning_rate": 1.773148840283301e-05, "loss": 0.8854, "step": 14350 }, { "epoch": 0.7321674399632897, "grad_norm": 2.9076586758259197, "learning_rate": 1.766846030740682e-05, "loss": 0.8815, "step": 14360 }, { "epoch": 0.7326773058685566, "grad_norm": 2.169529339937437, "learning_rate": 1.7605520382288897e-05, "loss": 0.8767, "step": 14370 }, { "epoch": 0.7331871717738235, "grad_norm": 3.7153738750289653, "learning_rate": 1.7542668799120688e-05, "loss": 0.9442, "step": 14380 }, { "epoch": 0.7336970376790904, "grad_norm": 3.050860282460858, "learning_rate": 1.747990572930276e-05, "loss": 0.9095, "step": 14390 }, { "epoch": 0.7342069035843574, "grad_norm": 5.002041841328034, "learning_rate": 1.7417231343994267e-05, "loss": 0.914, "step": 14400 }, { "epoch": 0.7347167694896243, "grad_norm": 2.9088511401907766, "learning_rate": 1.735464581411252e-05, "loss": 0.8855, "step": 14410 }, { "epoch": 0.7352266353948912, "grad_norm": 3.7862878073377937, "learning_rate": 1.7292149310332513e-05, "loss": 0.8165, "step": 14420 }, { "epoch": 0.7357365013001581, "grad_norm": 2.7539436495085603, "learning_rate": 1.7229742003086463e-05, "loss": 0.8843, "step": 14430 }, { "epoch": 0.736246367205425, "grad_norm": 3.1947822977994984, "learning_rate": 1.7167424062563348e-05, "loss": 0.8858, "step": 14440 }, { "epoch": 0.7367562331106919, "grad_norm": 4.095285565190369, "learning_rate": 1.7105195658708412e-05, "loss": 0.9358, "step": 14450 }, { "epoch": 0.7372660990159589, "grad_norm": 2.7962148323450693, "learning_rate": 1.7043056961222796e-05, "loss": 0.8718, "step": 14460 }, { "epoch": 0.7377759649212258, "grad_norm": 2.7978242591498623, "learning_rate": 1.698100813956289e-05, "loss": 0.9113, "step": 14470 }, { "epoch": 0.7382858308264927, "grad_norm": 2.5347340645021075, "learning_rate": 1.691904936294009e-05, "loss": 0.842, "step": 14480 }, { "epoch": 0.7387956967317596, "grad_norm": 3.7318593907692277, "learning_rate": 1.68571808003202e-05, "loss": 0.9026, "step": 14490 }, { "epoch": 0.7393055626370265, "grad_norm": 3.219925944875457, "learning_rate": 1.6795402620422968e-05, "loss": 0.8568, "step": 14500 }, { "epoch": 0.7398154285422934, "grad_norm": 3.5941302291020305, "learning_rate": 1.673371499172174e-05, "loss": 0.8702, "step": 14510 }, { "epoch": 0.7403252944475603, "grad_norm": 9.173447786756226, "learning_rate": 1.6672118082442823e-05, "loss": 0.8915, "step": 14520 }, { "epoch": 0.7408351603528273, "grad_norm": 2.3033144486720065, "learning_rate": 1.6610612060565234e-05, "loss": 0.9019, "step": 14530 }, { "epoch": 0.7413450262580942, "grad_norm": 3.624696375079677, "learning_rate": 1.6549197093820018e-05, "loss": 0.8692, "step": 14540 }, { "epoch": 0.7418548921633611, "grad_norm": 2.8557781803819506, "learning_rate": 1.6487873349690032e-05, "loss": 0.8348, "step": 14550 }, { "epoch": 0.742364758068628, "grad_norm": 3.6671225315913323, "learning_rate": 1.6426640995409232e-05, "loss": 0.8822, "step": 14560 }, { "epoch": 0.7428746239738949, "grad_norm": 2.796368915798219, "learning_rate": 1.6365500197962463e-05, "loss": 0.881, "step": 14570 }, { "epoch": 0.7433844898791617, "grad_norm": 2.750712257117753, "learning_rate": 1.6304451124084823e-05, "loss": 0.8599, "step": 14580 }, { "epoch": 0.7438943557844286, "grad_norm": 4.038589338654805, "learning_rate": 1.6243493940261295e-05, "loss": 0.8748, "step": 14590 }, { "epoch": 0.7444042216896956, "grad_norm": 4.197868213550896, "learning_rate": 1.618262881272626e-05, "loss": 0.8556, 
"step": 14600 }, { "epoch": 0.7449140875949625, "grad_norm": 3.061273404676564, "learning_rate": 1.612185590746304e-05, "loss": 0.8258, "step": 14610 }, { "epoch": 0.7454239535002294, "grad_norm": 5.218000571306212, "learning_rate": 1.6061175390203525e-05, "loss": 0.8827, "step": 14620 }, { "epoch": 0.7459338194054963, "grad_norm": 2.494457198033094, "learning_rate": 1.6000587426427592e-05, "loss": 0.8294, "step": 14630 }, { "epoch": 0.7464436853107632, "grad_norm": 3.774928589841261, "learning_rate": 1.5940092181362742e-05, "loss": 0.8692, "step": 14640 }, { "epoch": 0.7469535512160301, "grad_norm": 5.045548659651567, "learning_rate": 1.5879689819983627e-05, "loss": 0.8996, "step": 14650 }, { "epoch": 0.747463417121297, "grad_norm": 3.43630615014554, "learning_rate": 1.581938050701159e-05, "loss": 0.8446, "step": 14660 }, { "epoch": 0.747973283026564, "grad_norm": 2.9483501469705296, "learning_rate": 1.575916440691423e-05, "loss": 0.8824, "step": 14670 }, { "epoch": 0.7484831489318309, "grad_norm": 3.744326663008319, "learning_rate": 1.569904168390495e-05, "loss": 0.9082, "step": 14680 }, { "epoch": 0.7489930148370978, "grad_norm": 3.346178111816128, "learning_rate": 1.5639012501942495e-05, "loss": 0.8943, "step": 14690 }, { "epoch": 0.7495028807423647, "grad_norm": 2.705456054021861, "learning_rate": 1.5579077024730555e-05, "loss": 0.8203, "step": 14700 }, { "epoch": 0.7500127466476316, "grad_norm": 5.027037292349675, "learning_rate": 1.5519235415717236e-05, "loss": 0.8954, "step": 14710 }, { "epoch": 0.7505226125528985, "grad_norm": 3.1064826233355918, "learning_rate": 1.5459487838094677e-05, "loss": 0.8163, "step": 14720 }, { "epoch": 0.7510324784581655, "grad_norm": 2.694475571297849, "learning_rate": 1.5399834454798594e-05, "loss": 0.8843, "step": 14730 }, { "epoch": 0.7515423443634324, "grad_norm": 3.327058419243026, "learning_rate": 1.53402754285078e-05, "loss": 0.8939, "step": 14740 }, { "epoch": 0.7520522102686993, "grad_norm": 6.102691369797055, "learning_rate": 1.5280810921643856e-05, "loss": 0.8729, "step": 14750 }, { "epoch": 0.7525620761739662, "grad_norm": 2.931116244008933, "learning_rate": 1.5221441096370459e-05, "loss": 0.8847, "step": 14760 }, { "epoch": 0.7530719420792331, "grad_norm": 3.2689777653057277, "learning_rate": 1.516216611459323e-05, "loss": 0.9207, "step": 14770 }, { "epoch": 0.7535818079845, "grad_norm": 4.005422842172153, "learning_rate": 1.5102986137959001e-05, "loss": 0.8815, "step": 14780 }, { "epoch": 0.754091673889767, "grad_norm": 3.500402263868153, "learning_rate": 1.5043901327855647e-05, "loss": 0.8342, "step": 14790 }, { "epoch": 0.7546015397950339, "grad_norm": 3.9180680848894043, "learning_rate": 1.4984911845411453e-05, "loss": 0.9318, "step": 14800 }, { "epoch": 0.7551114057003008, "grad_norm": 2.6078203177003796, "learning_rate": 1.492601785149475e-05, "loss": 0.878, "step": 14810 }, { "epoch": 0.7556212716055677, "grad_norm": 3.8300001695915973, "learning_rate": 1.4867219506713458e-05, "loss": 0.958, "step": 14820 }, { "epoch": 0.7561311375108346, "grad_norm": 3.3155999260231788, "learning_rate": 1.480851697141466e-05, "loss": 0.8031, "step": 14830 }, { "epoch": 0.7566410034161015, "grad_norm": 5.93548750073978, "learning_rate": 1.4749910405684197e-05, "loss": 0.8614, "step": 14840 }, { "epoch": 0.7571508693213684, "grad_norm": 8.015838481204414, "learning_rate": 1.469139996934611e-05, "loss": 0.8456, "step": 14850 }, { "epoch": 0.7576607352266354, "grad_norm": 2.719745178839152, "learning_rate": 1.463298582196238e-05, "loss": 0.8895, 
"step": 14860 }, { "epoch": 0.7581706011319023, "grad_norm": 4.1208125735007775, "learning_rate": 1.4574668122832346e-05, "loss": 0.8314, "step": 14870 }, { "epoch": 0.7586804670371692, "grad_norm": 5.103220951819016, "learning_rate": 1.4516447030992342e-05, "loss": 0.8828, "step": 14880 }, { "epoch": 0.7591903329424361, "grad_norm": 4.2279259418311765, "learning_rate": 1.4458322705215244e-05, "loss": 0.949, "step": 14890 }, { "epoch": 0.759700198847703, "grad_norm": 3.6821822208934, "learning_rate": 1.4400295304010041e-05, "loss": 0.9127, "step": 14900 }, { "epoch": 0.7602100647529699, "grad_norm": 4.433950310983757, "learning_rate": 1.434236498562141e-05, "loss": 0.8777, "step": 14910 }, { "epoch": 0.7607199306582368, "grad_norm": 3.258546059081396, "learning_rate": 1.4284531908029253e-05, "loss": 0.8015, "step": 14920 }, { "epoch": 0.7612297965635038, "grad_norm": 2.3659649654317016, "learning_rate": 1.4226796228948337e-05, "loss": 0.851, "step": 14930 }, { "epoch": 0.7617396624687707, "grad_norm": 2.2409439689317914, "learning_rate": 1.4169158105827768e-05, "loss": 0.8608, "step": 14940 }, { "epoch": 0.7622495283740376, "grad_norm": 3.0715204978103725, "learning_rate": 1.4111617695850631e-05, "loss": 0.7935, "step": 14950 }, { "epoch": 0.7627593942793045, "grad_norm": 4.267005282076962, "learning_rate": 1.4054175155933541e-05, "loss": 0.9154, "step": 14960 }, { "epoch": 0.7632692601845714, "grad_norm": 2.4892490009386243, "learning_rate": 1.399683064272621e-05, "loss": 0.8468, "step": 14970 }, { "epoch": 0.7637791260898383, "grad_norm": 3.031762626944372, "learning_rate": 1.3939584312611015e-05, "loss": 0.8229, "step": 14980 }, { "epoch": 0.7642889919951052, "grad_norm": 2.7553302663332615, "learning_rate": 1.388243632170263e-05, "loss": 0.8596, "step": 14990 }, { "epoch": 0.7647988579003722, "grad_norm": 3.4874566989678146, "learning_rate": 1.3825386825847459e-05, "loss": 0.8195, "step": 15000 }, { "epoch": 0.7653087238056391, "grad_norm": 2.408476831561337, "learning_rate": 1.3768435980623407e-05, "loss": 0.8469, "step": 15010 }, { "epoch": 0.765818589710906, "grad_norm": 4.117755860054798, "learning_rate": 1.371158394133928e-05, "loss": 0.967, "step": 15020 }, { "epoch": 0.7663284556161729, "grad_norm": 4.81413553922508, "learning_rate": 1.3654830863034462e-05, "loss": 0.8602, "step": 15030 }, { "epoch": 0.7668383215214398, "grad_norm": 2.831741053689239, "learning_rate": 1.359817690047846e-05, "loss": 0.8479, "step": 15040 }, { "epoch": 0.7673481874267067, "grad_norm": 3.744551247110337, "learning_rate": 1.3541622208170467e-05, "loss": 0.8596, "step": 15050 }, { "epoch": 0.7678580533319737, "grad_norm": 3.7148036923374574, "learning_rate": 1.3485166940339016e-05, "loss": 0.9524, "step": 15060 }, { "epoch": 0.7683679192372406, "grad_norm": 4.335382354236005, "learning_rate": 1.3428811250941414e-05, "loss": 0.9149, "step": 15070 }, { "epoch": 0.7688777851425075, "grad_norm": 2.9316183575322983, "learning_rate": 1.3372555293663514e-05, "loss": 0.8977, "step": 15080 }, { "epoch": 0.7693876510477744, "grad_norm": 3.10946155411863, "learning_rate": 1.3316399221919074e-05, "loss": 0.8478, "step": 15090 }, { "epoch": 0.7698975169530413, "grad_norm": 4.72995402941815, "learning_rate": 1.3260343188849572e-05, "loss": 0.8608, "step": 15100 }, { "epoch": 0.7704073828583082, "grad_norm": 3.2196110816338464, "learning_rate": 1.320438734732361e-05, "loss": 0.8689, "step": 15110 }, { "epoch": 0.7709172487635751, "grad_norm": 2.5940976010805183, "learning_rate": 1.3148531849936574e-05, "loss": 
0.919, "step": 15120 }, { "epoch": 0.7714271146688421, "grad_norm": 3.5281290333681996, "learning_rate": 1.3092776849010202e-05, "loss": 0.8215, "step": 15130 }, { "epoch": 0.771936980574109, "grad_norm": 2.3083401803024284, "learning_rate": 1.3037122496592164e-05, "loss": 0.807, "step": 15140 }, { "epoch": 0.7724468464793759, "grad_norm": 4.48237544587891, "learning_rate": 1.2981568944455707e-05, "loss": 0.8607, "step": 15150 }, { "epoch": 0.7729567123846428, "grad_norm": 5.2965861701358214, "learning_rate": 1.2926116344099087e-05, "loss": 0.8734, "step": 15160 }, { "epoch": 0.7734665782899097, "grad_norm": 2.2086316730073556, "learning_rate": 1.2870764846745365e-05, "loss": 0.8035, "step": 15170 }, { "epoch": 0.7739764441951766, "grad_norm": 4.2610353748951955, "learning_rate": 1.2815514603341822e-05, "loss": 0.8885, "step": 15180 }, { "epoch": 0.7744863101004436, "grad_norm": 2.7360133633418293, "learning_rate": 1.2760365764559634e-05, "loss": 0.9074, "step": 15190 }, { "epoch": 0.7749961760057105, "grad_norm": 3.9738716131027885, "learning_rate": 1.2705318480793432e-05, "loss": 0.8907, "step": 15200 }, { "epoch": 0.7755060419109774, "grad_norm": 2.954769888251, "learning_rate": 1.2650372902160906e-05, "loss": 0.8772, "step": 15210 }, { "epoch": 0.7760159078162443, "grad_norm": 2.1621609292566855, "learning_rate": 1.2595529178502386e-05, "loss": 0.8465, "step": 15220 }, { "epoch": 0.7765257737215112, "grad_norm": 3.010115001320829, "learning_rate": 1.2540787459380421e-05, "loss": 0.8738, "step": 15230 }, { "epoch": 0.7770356396267781, "grad_norm": 2.762540298355899, "learning_rate": 1.2486147894079442e-05, "loss": 0.8319, "step": 15240 }, { "epoch": 0.777545505532045, "grad_norm": 4.092547126744568, "learning_rate": 1.2431610631605234e-05, "loss": 0.8491, "step": 15250 }, { "epoch": 0.778055371437312, "grad_norm": 2.8888610964288404, "learning_rate": 1.2377175820684628e-05, "loss": 0.7645, "step": 15260 }, { "epoch": 0.7785652373425789, "grad_norm": 3.8152499348556907, "learning_rate": 1.2322843609765056e-05, "loss": 0.9073, "step": 15270 }, { "epoch": 0.7790751032478458, "grad_norm": 3.1391522021865192, "learning_rate": 1.2268614147014152e-05, "loss": 0.9408, "step": 15280 }, { "epoch": 0.7795849691531127, "grad_norm": 2.7293474454182705, "learning_rate": 1.2214487580319334e-05, "loss": 0.9204, "step": 15290 }, { "epoch": 0.7800948350583796, "grad_norm": 4.976415538766265, "learning_rate": 1.2160464057287479e-05, "loss": 0.8978, "step": 15300 }, { "epoch": 0.7806047009636465, "grad_norm": 2.3275484642450968, "learning_rate": 1.2106543725244357e-05, "loss": 0.8873, "step": 15310 }, { "epoch": 0.7811145668689135, "grad_norm": 2.300791730744753, "learning_rate": 1.2052726731234409e-05, "loss": 0.8828, "step": 15320 }, { "epoch": 0.7816244327741804, "grad_norm": 3.2802508324260646, "learning_rate": 1.199901322202024e-05, "loss": 0.9419, "step": 15330 }, { "epoch": 0.7821342986794473, "grad_norm": 3.410662489857846, "learning_rate": 1.1945403344082234e-05, "loss": 0.8756, "step": 15340 }, { "epoch": 0.7826441645847142, "grad_norm": 3.4264529388087506, "learning_rate": 1.1891897243618182e-05, "loss": 0.9008, "step": 15350 }, { "epoch": 0.7831540304899811, "grad_norm": 2.8127713515538724, "learning_rate": 1.1838495066542842e-05, "loss": 0.9028, "step": 15360 }, { "epoch": 0.783663896395248, "grad_norm": 2.641184167083469, "learning_rate": 1.1785196958487627e-05, "loss": 0.9116, "step": 15370 }, { "epoch": 0.7841737623005149, "grad_norm": 3.372592168887771, "learning_rate": 
1.1732003064800045e-05, "loss": 0.816, "step": 15380 }, { "epoch": 0.7846836282057819, "grad_norm": 4.8521661786297265, "learning_rate": 1.1678913530543523e-05, "loss": 0.8601, "step": 15390 }, { "epoch": 0.7851934941110488, "grad_norm": 2.9947310910954843, "learning_rate": 1.1625928500496774e-05, "loss": 0.8873, "step": 15400 }, { "epoch": 0.7857033600163157, "grad_norm": 2.7372429848001567, "learning_rate": 1.1573048119153623e-05, "loss": 0.843, "step": 15410 }, { "epoch": 0.7862132259215826, "grad_norm": 4.325793719043827, "learning_rate": 1.1520272530722453e-05, "loss": 0.9278, "step": 15420 }, { "epoch": 0.7867230918268495, "grad_norm": 2.895079457895283, "learning_rate": 1.1467601879125883e-05, "loss": 0.8213, "step": 15430 }, { "epoch": 0.7872329577321164, "grad_norm": 2.673845770405087, "learning_rate": 1.1415036308000365e-05, "loss": 0.8808, "step": 15440 }, { "epoch": 0.7877428236373833, "grad_norm": 4.681838343953212, "learning_rate": 1.136257596069577e-05, "loss": 0.9322, "step": 15450 }, { "epoch": 0.7882526895426503, "grad_norm": 2.570966125797705, "learning_rate": 1.1310220980275082e-05, "loss": 0.9371, "step": 15460 }, { "epoch": 0.7887625554479172, "grad_norm": 2.2118704016933215, "learning_rate": 1.1257971509513832e-05, "loss": 0.8454, "step": 15470 }, { "epoch": 0.7892724213531841, "grad_norm": 3.3732300776354394, "learning_rate": 1.1205827690899927e-05, "loss": 0.8697, "step": 15480 }, { "epoch": 0.789782287258451, "grad_norm": 4.036479967416448, "learning_rate": 1.1153789666633096e-05, "loss": 0.8722, "step": 15490 }, { "epoch": 0.7902921531637179, "grad_norm": 3.323040700587493, "learning_rate": 1.1101857578624564e-05, "loss": 0.9044, "step": 15500 }, { "epoch": 0.7908020190689848, "grad_norm": 3.6008204869697797, "learning_rate": 1.1050031568496672e-05, "loss": 0.8795, "step": 15510 }, { "epoch": 0.7913118849742518, "grad_norm": 3.398639295785761, "learning_rate": 1.0998311777582476e-05, "loss": 0.8519, "step": 15520 }, { "epoch": 0.7918217508795187, "grad_norm": 2.233934894756387, "learning_rate": 1.094669834692535e-05, "loss": 0.8117, "step": 15530 }, { "epoch": 0.7923316167847856, "grad_norm": 2.690786504073156, "learning_rate": 1.0895191417278661e-05, "loss": 0.933, "step": 15540 }, { "epoch": 0.7928414826900525, "grad_norm": 4.0881895800161345, "learning_rate": 1.0843791129105285e-05, "loss": 0.8919, "step": 15550 }, { "epoch": 0.7933513485953194, "grad_norm": 4.155974489867237, "learning_rate": 1.0792497622577325e-05, "loss": 0.8276, "step": 15560 }, { "epoch": 0.7938612145005863, "grad_norm": 2.6755614926094147, "learning_rate": 1.074131103757564e-05, "loss": 0.8212, "step": 15570 }, { "epoch": 0.7943710804058532, "grad_norm": 3.438908813746616, "learning_rate": 1.0690231513689548e-05, "loss": 0.8987, "step": 15580 }, { "epoch": 0.7948809463111202, "grad_norm": 5.257275543448024, "learning_rate": 1.0639259190216377e-05, "loss": 0.9214, "step": 15590 }, { "epoch": 0.7953908122163871, "grad_norm": 2.7140298504555154, "learning_rate": 1.0588394206161106e-05, "loss": 0.8997, "step": 15600 }, { "epoch": 0.795900678121654, "grad_norm": 2.471384156286019, "learning_rate": 1.0537636700236053e-05, "loss": 0.7906, "step": 15610 }, { "epoch": 0.7964105440269209, "grad_norm": 4.130898081102724, "learning_rate": 1.0486986810860334e-05, "loss": 0.8608, "step": 15620 }, { "epoch": 0.7969204099321878, "grad_norm": 3.4187367338017514, "learning_rate": 1.0436444676159674e-05, "loss": 0.8403, "step": 15630 }, { "epoch": 0.7974302758374547, "grad_norm": 3.990897781034759, 
"learning_rate": 1.0386010433965915e-05, "loss": 0.8962, "step": 15640 }, { "epoch": 0.7979401417427217, "grad_norm": 2.9080947384924225, "learning_rate": 1.0335684221816654e-05, "loss": 0.8383, "step": 15650 }, { "epoch": 0.7984500076479886, "grad_norm": 3.940054399989881, "learning_rate": 1.0285466176954905e-05, "loss": 0.8969, "step": 15660 }, { "epoch": 0.7989598735532555, "grad_norm": 5.037355841684986, "learning_rate": 1.0235356436328675e-05, "loss": 0.9037, "step": 15670 }, { "epoch": 0.7994697394585224, "grad_norm": 2.077271792357116, "learning_rate": 1.0185355136590669e-05, "loss": 0.8897, "step": 15680 }, { "epoch": 0.7999796053637893, "grad_norm": 2.807216356773431, "learning_rate": 1.013546241409779e-05, "loss": 0.8768, "step": 15690 }, { "epoch": 0.8004894712690562, "grad_norm": 3.772436361366961, "learning_rate": 1.0085678404910936e-05, "loss": 0.8384, "step": 15700 }, { "epoch": 0.8009993371743231, "grad_norm": 3.6199871979687526, "learning_rate": 1.003600324479443e-05, "loss": 0.8623, "step": 15710 }, { "epoch": 0.8015092030795901, "grad_norm": 3.3410794161187494, "learning_rate": 9.986437069215859e-06, "loss": 0.7986, "step": 15720 }, { "epoch": 0.802019068984857, "grad_norm": 3.1639330607365226, "learning_rate": 9.936980013345543e-06, "loss": 0.7878, "step": 15730 }, { "epoch": 0.8025289348901239, "grad_norm": 3.9782955261658413, "learning_rate": 9.88763221205623e-06, "loss": 0.826, "step": 15740 }, { "epoch": 0.8030388007953908, "grad_norm": 2.787918275512712, "learning_rate": 9.838393799922735e-06, "loss": 0.8168, "step": 15750 }, { "epoch": 0.8035486667006577, "grad_norm": 3.874444869921306, "learning_rate": 9.789264911221546e-06, "loss": 0.8714, "step": 15760 }, { "epoch": 0.8040585326059246, "grad_norm": 3.0730481094982647, "learning_rate": 9.740245679930526e-06, "loss": 0.8761, "step": 15770 }, { "epoch": 0.8045683985111916, "grad_norm": 4.054732819960556, "learning_rate": 9.6913362397284e-06, "loss": 0.8234, "step": 15780 }, { "epoch": 0.8050782644164585, "grad_norm": 2.5121594783856445, "learning_rate": 9.642536723994572e-06, "loss": 0.855, "step": 15790 }, { "epoch": 0.8055881303217254, "grad_norm": 4.135088131556567, "learning_rate": 9.593847265808637e-06, "loss": 0.8791, "step": 15800 }, { "epoch": 0.8060979962269923, "grad_norm": 4.82689473053539, "learning_rate": 9.545267997950052e-06, "loss": 0.8946, "step": 15810 }, { "epoch": 0.8066078621322592, "grad_norm": 4.492685827978297, "learning_rate": 9.496799052897786e-06, "loss": 0.8565, "step": 15820 }, { "epoch": 0.8071177280375261, "grad_norm": 2.8347254300348443, "learning_rate": 9.448440562829958e-06, "loss": 0.8884, "step": 15830 }, { "epoch": 0.807627593942793, "grad_norm": 4.0989654125585275, "learning_rate": 9.400192659623442e-06, "loss": 0.8717, "step": 15840 }, { "epoch": 0.80813745984806, "grad_norm": 3.3114878534081633, "learning_rate": 9.352055474853577e-06, "loss": 0.8199, "step": 15850 }, { "epoch": 0.8086473257533269, "grad_norm": 2.3822641673943794, "learning_rate": 9.30402913979373e-06, "loss": 0.8839, "step": 15860 }, { "epoch": 0.8091571916585938, "grad_norm": 2.6897194947382714, "learning_rate": 9.256113785414983e-06, "loss": 0.8619, "step": 15870 }, { "epoch": 0.8096670575638607, "grad_norm": 2.9736849566182273, "learning_rate": 9.208309542385763e-06, "loss": 0.848, "step": 15880 }, { "epoch": 0.8101769234691276, "grad_norm": 3.591038409239247, "learning_rate": 9.160616541071498e-06, "loss": 0.909, "step": 15890 }, { "epoch": 0.8106867893743945, "grad_norm": 4.808153735781551, 
"learning_rate": 9.113034911534252e-06, "loss": 0.9125, "step": 15900 }, { "epoch": 0.8111966552796614, "grad_norm": 2.7296413543367017, "learning_rate": 9.065564783532337e-06, "loss": 0.918, "step": 15910 }, { "epoch": 0.8117065211849284, "grad_norm": 3.536981412579703, "learning_rate": 9.018206286520076e-06, "loss": 0.8349, "step": 15920 }, { "epoch": 0.8122163870901953, "grad_norm": 3.1011615339063376, "learning_rate": 8.970959549647256e-06, "loss": 0.8511, "step": 15930 }, { "epoch": 0.8127262529954622, "grad_norm": 3.252409169101741, "learning_rate": 8.923824701758977e-06, "loss": 0.8847, "step": 15940 }, { "epoch": 0.8132361189007291, "grad_norm": 4.455089143947698, "learning_rate": 8.876801871395162e-06, "loss": 0.9451, "step": 15950 }, { "epoch": 0.813745984805996, "grad_norm": 2.4217448895990086, "learning_rate": 8.829891186790279e-06, "loss": 0.8865, "step": 15960 }, { "epoch": 0.8142558507112629, "grad_norm": 3.589486984451676, "learning_rate": 8.783092775872947e-06, "loss": 0.8133, "step": 15970 }, { "epoch": 0.8147657166165299, "grad_norm": 2.7211659623212117, "learning_rate": 8.736406766265604e-06, "loss": 0.8709, "step": 15980 }, { "epoch": 0.8152755825217968, "grad_norm": 2.761187608907662, "learning_rate": 8.689833285284215e-06, "loss": 0.7868, "step": 15990 }, { "epoch": 0.8157854484270637, "grad_norm": 3.1331219910466808, "learning_rate": 8.643372459937782e-06, "loss": 0.8954, "step": 16000 }, { "epoch": 0.8162953143323306, "grad_norm": 3.2231351382034776, "learning_rate": 8.597024416928185e-06, "loss": 0.8256, "step": 16010 }, { "epoch": 0.8168051802375975, "grad_norm": 2.470765388519924, "learning_rate": 8.550789282649636e-06, "loss": 0.8837, "step": 16020 }, { "epoch": 0.8173150461428644, "grad_norm": 3.253600843830807, "learning_rate": 8.504667183188535e-06, "loss": 0.8608, "step": 16030 }, { "epoch": 0.8178249120481313, "grad_norm": 3.9493017734822318, "learning_rate": 8.45865824432297e-06, "loss": 0.8991, "step": 16040 }, { "epoch": 0.8183347779533983, "grad_norm": 3.8479711856362417, "learning_rate": 8.41276259152245e-06, "loss": 0.8186, "step": 16050 }, { "epoch": 0.8188446438586652, "grad_norm": 2.4952764775374447, "learning_rate": 8.366980349947534e-06, "loss": 0.849, "step": 16060 }, { "epoch": 0.8193545097639321, "grad_norm": 2.318077856843593, "learning_rate": 8.321311644449508e-06, "loss": 0.958, "step": 16070 }, { "epoch": 0.819864375669199, "grad_norm": 2.4305126243307034, "learning_rate": 8.275756599570062e-06, "loss": 0.8906, "step": 16080 }, { "epoch": 0.8203742415744659, "grad_norm": 3.4006906741716794, "learning_rate": 8.230315339540878e-06, "loss": 0.8523, "step": 16090 }, { "epoch": 0.8208841074797328, "grad_norm": 3.9703650166653026, "learning_rate": 8.184987988283377e-06, "loss": 0.9145, "step": 16100 }, { "epoch": 0.8213939733849998, "grad_norm": 3.6893958465677064, "learning_rate": 8.139774669408329e-06, "loss": 0.8816, "step": 16110 }, { "epoch": 0.8219038392902667, "grad_norm": 2.61513028940626, "learning_rate": 8.094675506215521e-06, "loss": 0.7898, "step": 16120 }, { "epoch": 0.8224137051955336, "grad_norm": 2.57873440427701, "learning_rate": 8.049690621693445e-06, "loss": 0.8591, "step": 16130 }, { "epoch": 0.8229235711008005, "grad_norm": 3.343367401705895, "learning_rate": 8.004820138518936e-06, "loss": 0.8698, "step": 16140 }, { "epoch": 0.8234334370060674, "grad_norm": 2.6164216953992203, "learning_rate": 7.960064179056853e-06, "loss": 0.8873, "step": 16150 }, { "epoch": 0.8239433029113343, "grad_norm": 5.099384224163187, 
"learning_rate": 7.915422865359761e-06, "loss": 0.9454, "step": 16160 }, { "epoch": 0.8244531688166012, "grad_norm": 2.708107590289228, "learning_rate": 7.870896319167548e-06, "loss": 0.8017, "step": 16170 }, { "epoch": 0.8249630347218682, "grad_norm": 2.7594442167712034, "learning_rate": 7.82648466190713e-06, "loss": 0.9691, "step": 16180 }, { "epoch": 0.8254729006271351, "grad_norm": 3.405779799598549, "learning_rate": 7.78218801469212e-06, "loss": 0.9282, "step": 16190 }, { "epoch": 0.825982766532402, "grad_norm": 2.6278792255712617, "learning_rate": 7.738006498322476e-06, "loss": 0.8982, "step": 16200 }, { "epoch": 0.8264926324376689, "grad_norm": 2.217396565318523, "learning_rate": 7.693940233284241e-06, "loss": 0.8412, "step": 16210 }, { "epoch": 0.8270024983429358, "grad_norm": 5.111239654988263, "learning_rate": 7.649989339749063e-06, "loss": 0.8672, "step": 16220 }, { "epoch": 0.8275123642482027, "grad_norm": 2.8779222319150386, "learning_rate": 7.6061539375740675e-06, "loss": 0.8975, "step": 16230 }, { "epoch": 0.8280222301534697, "grad_norm": 3.616875433509951, "learning_rate": 7.562434146301328e-06, "loss": 0.8724, "step": 16240 }, { "epoch": 0.8285320960587366, "grad_norm": 3.722391811343343, "learning_rate": 7.518830085157735e-06, "loss": 0.8684, "step": 16250 }, { "epoch": 0.8290419619640035, "grad_norm": 4.7903255788565815, "learning_rate": 7.475341873054509e-06, "loss": 0.8418, "step": 16260 }, { "epoch": 0.8295518278692704, "grad_norm": 4.933738813144349, "learning_rate": 7.4319696285869656e-06, "loss": 0.8415, "step": 16270 }, { "epoch": 0.8300616937745373, "grad_norm": 3.3060104127437553, "learning_rate": 7.38871347003417e-06, "loss": 0.8067, "step": 16280 }, { "epoch": 0.8305715596798042, "grad_norm": 3.0203188021559413, "learning_rate": 7.345573515358589e-06, "loss": 0.8202, "step": 16290 }, { "epoch": 0.8310814255850711, "grad_norm": 4.845751863789938, "learning_rate": 7.3025498822058555e-06, "loss": 0.891, "step": 16300 }, { "epoch": 0.8315912914903381, "grad_norm": 3.7309637791842105, "learning_rate": 7.259642687904294e-06, "loss": 0.8687, "step": 16310 }, { "epoch": 0.832101157395605, "grad_norm": 2.502869101004249, "learning_rate": 7.216852049464789e-06, "loss": 0.8821, "step": 16320 }, { "epoch": 0.8326110233008719, "grad_norm": 3.8998120732085537, "learning_rate": 7.1741780835802795e-06, "loss": 0.9047, "step": 16330 }, { "epoch": 0.8331208892061388, "grad_norm": 2.594922394385684, "learning_rate": 7.131620906625608e-06, "loss": 0.8632, "step": 16340 }, { "epoch": 0.8336307551114057, "grad_norm": 4.10832131941104, "learning_rate": 7.089180634657072e-06, "loss": 0.8188, "step": 16350 }, { "epoch": 0.8341406210166726, "grad_norm": 3.4778953314658176, "learning_rate": 7.046857383412192e-06, "loss": 0.9106, "step": 16360 }, { "epoch": 0.8346504869219395, "grad_norm": 3.4375785107246184, "learning_rate": 7.004651268309348e-06, "loss": 0.8377, "step": 16370 }, { "epoch": 0.8351603528272065, "grad_norm": 4.5960096164216715, "learning_rate": 6.96256240444747e-06, "loss": 0.8717, "step": 16380 }, { "epoch": 0.8356702187324734, "grad_norm": 2.867417877276137, "learning_rate": 6.92059090660579e-06, "loss": 0.8121, "step": 16390 }, { "epoch": 0.8361800846377403, "grad_norm": 3.529230966679883, "learning_rate": 6.878736889243409e-06, "loss": 0.9319, "step": 16400 }, { "epoch": 0.8366899505430072, "grad_norm": 2.977687049441769, "learning_rate": 6.8370004664990936e-06, "loss": 0.8468, "step": 16410 }, { "epoch": 0.8371998164482741, "grad_norm": 2.8806625721464036, 
"learning_rate": 6.795381752190904e-06, "loss": 0.8855, "step": 16420 }, { "epoch": 0.837709682353541, "grad_norm": 3.0713559435415982, "learning_rate": 6.753880859815903e-06, "loss": 0.8187, "step": 16430 }, { "epoch": 0.838219548258808, "grad_norm": 4.83372555516333, "learning_rate": 6.712497902549847e-06, "loss": 0.7915, "step": 16440 }, { "epoch": 0.8387294141640749, "grad_norm": 4.6112192242332375, "learning_rate": 6.671232993246879e-06, "loss": 0.9248, "step": 16450 }, { "epoch": 0.8392392800693418, "grad_norm": 5.174748909638641, "learning_rate": 6.630086244439204e-06, "loss": 0.8935, "step": 16460 }, { "epoch": 0.8397491459746087, "grad_norm": 2.8202028698722277, "learning_rate": 6.58905776833682e-06, "loss": 0.8285, "step": 16470 }, { "epoch": 0.8402590118798756, "grad_norm": 2.6916122398911284, "learning_rate": 6.548147676827171e-06, "loss": 0.9034, "step": 16480 }, { "epoch": 0.8407688777851425, "grad_norm": 4.025784568272635, "learning_rate": 6.5073560814748526e-06, "loss": 0.8402, "step": 16490 }, { "epoch": 0.8412787436904094, "grad_norm": 3.831845314955286, "learning_rate": 6.46668309352132e-06, "loss": 0.8344, "step": 16500 }, { "epoch": 0.8417886095956764, "grad_norm": 3.077220271058762, "learning_rate": 6.426128823884575e-06, "loss": 0.871, "step": 16510 }, { "epoch": 0.8422984755009433, "grad_norm": 2.982336543536334, "learning_rate": 6.385693383158897e-06, "loss": 0.8408, "step": 16520 }, { "epoch": 0.8428083414062102, "grad_norm": 2.760870638916319, "learning_rate": 6.34537688161444e-06, "loss": 0.8809, "step": 16530 }, { "epoch": 0.8433182073114771, "grad_norm": 4.211621126833601, "learning_rate": 6.3051794291970944e-06, "loss": 0.8935, "step": 16540 }, { "epoch": 0.843828073216744, "grad_norm": 3.531257913443177, "learning_rate": 6.265101135527995e-06, "loss": 0.9046, "step": 16550 }, { "epoch": 0.8443379391220109, "grad_norm": 3.5338096194632524, "learning_rate": 6.225142109903426e-06, "loss": 0.8521, "step": 16560 }, { "epoch": 0.8448478050272779, "grad_norm": 5.067856498419463, "learning_rate": 6.185302461294323e-06, "loss": 0.9061, "step": 16570 }, { "epoch": 0.8453576709325448, "grad_norm": 3.329892039218186, "learning_rate": 6.145582298346153e-06, "loss": 0.875, "step": 16580 }, { "epoch": 0.8458675368378117, "grad_norm": 2.274387432212045, "learning_rate": 6.1059817293784905e-06, "loss": 0.9349, "step": 16590 }, { "epoch": 0.8463774027430786, "grad_norm": 3.9161422877800294, "learning_rate": 6.066500862384772e-06, "loss": 0.807, "step": 16600 }, { "epoch": 0.8468872686483455, "grad_norm": 4.724170867775978, "learning_rate": 6.027139805032034e-06, "loss": 0.8691, "step": 16610 }, { "epoch": 0.8473971345536124, "grad_norm": 3.441993677446614, "learning_rate": 5.987898664660518e-06, "loss": 0.8585, "step": 16620 }, { "epoch": 0.8479070004588793, "grad_norm": 2.7631551181795846, "learning_rate": 5.948777548283518e-06, "loss": 0.9392, "step": 16630 }, { "epoch": 0.8484168663641463, "grad_norm": 4.39445720589836, "learning_rate": 5.909776562586966e-06, "loss": 0.8741, "step": 16640 }, { "epoch": 0.8489267322694132, "grad_norm": 4.127833421024382, "learning_rate": 5.8708958139292e-06, "loss": 0.8015, "step": 16650 }, { "epoch": 0.8494365981746801, "grad_norm": 2.8820499475149757, "learning_rate": 5.832135408340678e-06, "loss": 0.8801, "step": 16660 }, { "epoch": 0.849946464079947, "grad_norm": 1.9926941179839137, "learning_rate": 5.79349545152365e-06, "loss": 0.844, "step": 16670 }, { "epoch": 0.8504563299852139, "grad_norm": 2.4591920309937665, "learning_rate": 
5.754976048851918e-06, "loss": 0.8819, "step": 16680 }, { "epoch": 0.8509661958904808, "grad_norm": 2.2649787820447678, "learning_rate": 5.716577305370496e-06, "loss": 0.846, "step": 16690 }, { "epoch": 0.8514760617957478, "grad_norm": 4.140766093918557, "learning_rate": 5.678299325795389e-06, "loss": 0.7989, "step": 16700 }, { "epoch": 0.8519859277010147, "grad_norm": 3.1458346286813548, "learning_rate": 5.64014221451325e-06, "loss": 0.8247, "step": 16710 }, { "epoch": 0.8524957936062816, "grad_norm": 3.874402034680883, "learning_rate": 5.60210607558111e-06, "loss": 0.937, "step": 16720 }, { "epoch": 0.8530056595115485, "grad_norm": 6.516766826574404, "learning_rate": 5.5641910127261055e-06, "loss": 0.8999, "step": 16730 }, { "epoch": 0.8535155254168154, "grad_norm": 3.2136111758024235, "learning_rate": 5.526397129345201e-06, "loss": 0.8773, "step": 16740 }, { "epoch": 0.8540253913220823, "grad_norm": 3.4995559414420847, "learning_rate": 5.488724528504869e-06, "loss": 0.8279, "step": 16750 }, { "epoch": 0.8545352572273492, "grad_norm": 4.12837962380521, "learning_rate": 5.451173312940888e-06, "loss": 0.8974, "step": 16760 }, { "epoch": 0.8550451231326162, "grad_norm": 2.4110487720494747, "learning_rate": 5.41374358505794e-06, "loss": 0.8523, "step": 16770 }, { "epoch": 0.8555549890378831, "grad_norm": 2.6668339141232784, "learning_rate": 5.376435446929479e-06, "loss": 0.8172, "step": 16780 }, { "epoch": 0.85606485494315, "grad_norm": 2.8935183751225986, "learning_rate": 5.339249000297292e-06, "loss": 0.8975, "step": 16790 }, { "epoch": 0.8565747208484169, "grad_norm": 3.754043446253633, "learning_rate": 5.302184346571382e-06, "loss": 0.9335, "step": 16800 }, { "epoch": 0.8570845867536838, "grad_norm": 2.7840078796206638, "learning_rate": 5.265241586829567e-06, "loss": 0.8862, "step": 16810 }, { "epoch": 0.8575944526589507, "grad_norm": 2.990380757866614, "learning_rate": 5.228420821817265e-06, "loss": 0.8463, "step": 16820 }, { "epoch": 0.8581043185642176, "grad_norm": 2.760328530084163, "learning_rate": 5.191722151947226e-06, "loss": 0.8766, "step": 16830 }, { "epoch": 0.8586141844694846, "grad_norm": 2.8718300531093033, "learning_rate": 5.1551456772991835e-06, "loss": 0.8492, "step": 16840 }, { "epoch": 0.8591240503747515, "grad_norm": 4.9401918610140925, "learning_rate": 5.118691497619715e-06, "loss": 0.872, "step": 16850 }, { "epoch": 0.8596339162800184, "grad_norm": 2.9409033090841064, "learning_rate": 5.08235971232181e-06, "loss": 0.848, "step": 16860 }, { "epoch": 0.8601437821852853, "grad_norm": 3.14064409023017, "learning_rate": 5.046150420484752e-06, "loss": 0.8149, "step": 16870 }, { "epoch": 0.8606536480905522, "grad_norm": 2.696613971341707, "learning_rate": 5.010063720853709e-06, "loss": 0.7826, "step": 16880 }, { "epoch": 0.8611635139958191, "grad_norm": 2.773127692319091, "learning_rate": 4.974099711839592e-06, "loss": 0.8794, "step": 16890 }, { "epoch": 0.8616733799010861, "grad_norm": 6.8747030079738325, "learning_rate": 4.9382584915186934e-06, "loss": 0.8331, "step": 16900 }, { "epoch": 0.862183245806353, "grad_norm": 3.695685522208731, "learning_rate": 4.9025401576324456e-06, "loss": 0.8874, "step": 16910 }, { "epoch": 0.8626931117116199, "grad_norm": 3.260766796388533, "learning_rate": 4.866944807587209e-06, "loss": 0.8527, "step": 16920 }, { "epoch": 0.8632029776168868, "grad_norm": 3.759808703610138, "learning_rate": 4.83147253845388e-06, "loss": 0.8405, "step": 16930 }, { "epoch": 0.8637128435221537, "grad_norm": 3.003698027126189, "learning_rate": 
4.796123446967787e-06, "loss": 0.8812, "step": 16940 }, { "epoch": 0.8642227094274206, "grad_norm": 3.121755258754485, "learning_rate": 4.760897629528288e-06, "loss": 0.7932, "step": 16950 }, { "epoch": 0.8647325753326875, "grad_norm": 3.27344701554323, "learning_rate": 4.725795182198589e-06, "loss": 0.8311, "step": 16960 }, { "epoch": 0.8652424412379545, "grad_norm": 3.149376629291201, "learning_rate": 4.690816200705456e-06, "loss": 0.8213, "step": 16970 }, { "epoch": 0.8657523071432214, "grad_norm": 2.804037331505134, "learning_rate": 4.65596078043894e-06, "loss": 0.9377, "step": 16980 }, { "epoch": 0.8662621730484883, "grad_norm": 2.810177137052245, "learning_rate": 4.621229016452156e-06, "loss": 0.838, "step": 16990 }, { "epoch": 0.8667720389537552, "grad_norm": 3.5617441410460717, "learning_rate": 4.586621003460978e-06, "loss": 0.9192, "step": 17000 }, { "epoch": 0.8672819048590221, "grad_norm": 3.1535985280516523, "learning_rate": 4.552136835843801e-06, "loss": 0.7998, "step": 17010 }, { "epoch": 0.867791770764289, "grad_norm": 2.911092583241028, "learning_rate": 4.517776607641327e-06, "loss": 0.8529, "step": 17020 }, { "epoch": 0.868301636669556, "grad_norm": 2.6097489312022586, "learning_rate": 4.483540412556214e-06, "loss": 0.8552, "step": 17030 }, { "epoch": 0.8688115025748229, "grad_norm": 2.825900733733966, "learning_rate": 4.4494283439528995e-06, "loss": 0.8381, "step": 17040 }, { "epoch": 0.8693213684800898, "grad_norm": 4.707749869450497, "learning_rate": 4.41544049485732e-06, "loss": 0.881, "step": 17050 }, { "epoch": 0.8698312343853567, "grad_norm": 3.033282832876074, "learning_rate": 4.381576957956635e-06, "loss": 0.8764, "step": 17060 }, { "epoch": 0.8703411002906236, "grad_norm": 2.4140441305510634, "learning_rate": 4.347837825599044e-06, "loss": 0.8526, "step": 17070 }, { "epoch": 0.8708509661958905, "grad_norm": 2.703898562677979, "learning_rate": 4.31422318979342e-06, "loss": 0.9, "step": 17080 }, { "epoch": 0.8713608321011574, "grad_norm": 2.648912615174052, "learning_rate": 4.280733142209198e-06, "loss": 0.8763, "step": 17090 }, { "epoch": 0.8718706980064244, "grad_norm": 2.64571381505409, "learning_rate": 4.247367774175981e-06, "loss": 0.7991, "step": 17100 }, { "epoch": 0.8723805639116913, "grad_norm": 5.957919067488417, "learning_rate": 4.214127176683425e-06, "loss": 0.8429, "step": 17110 }, { "epoch": 0.8728904298169582, "grad_norm": 3.0097268354940585, "learning_rate": 4.181011440380889e-06, "loss": 0.8668, "step": 17120 }, { "epoch": 0.8734002957222251, "grad_norm": 3.0171216108212615, "learning_rate": 4.1480206555772335e-06, "loss": 0.8954, "step": 17130 }, { "epoch": 0.873910161627492, "grad_norm": 3.2224816444578805, "learning_rate": 4.115154912240593e-06, "loss": 0.8545, "step": 17140 }, { "epoch": 0.8744200275327589, "grad_norm": 2.858710303956987, "learning_rate": 4.082414299998061e-06, "loss": 0.7906, "step": 17150 }, { "epoch": 0.8749298934380259, "grad_norm": 3.1304540062971036, "learning_rate": 4.049798908135538e-06, "loss": 0.8259, "step": 17160 }, { "epoch": 0.8754397593432928, "grad_norm": 3.201828897583765, "learning_rate": 4.0173088255973876e-06, "loss": 0.8815, "step": 17170 }, { "epoch": 0.8759496252485597, "grad_norm": 3.90605922682777, "learning_rate": 3.984944140986302e-06, "loss": 0.8411, "step": 17180 }, { "epoch": 0.8764594911538266, "grad_norm": 2.909015114801005, "learning_rate": 3.9527049425629625e-06, "loss": 0.8251, "step": 17190 }, { "epoch": 0.8769693570590935, "grad_norm": 4.4745654254538945, "learning_rate": 
3.920591318245864e-06, "loss": 0.8814, "step": 17200 }, { "epoch": 0.8774792229643604, "grad_norm": 3.679638801253417, "learning_rate": 3.888603355611043e-06, "loss": 0.8712, "step": 17210 }, { "epoch": 0.8779890888696273, "grad_norm": 3.148123725056795, "learning_rate": 3.856741141891851e-06, "loss": 0.8884, "step": 17220 }, { "epoch": 0.8784989547748941, "grad_norm": 3.0572988475286023, "learning_rate": 3.825004763978707e-06, "loss": 0.8611, "step": 17230 }, { "epoch": 0.8790088206801611, "grad_norm": 2.8463176761925735, "learning_rate": 3.7933943084188685e-06, "loss": 0.8491, "step": 17240 }, { "epoch": 0.879518686585428, "grad_norm": 3.484277901488632, "learning_rate": 3.761909861416213e-06, "loss": 0.8225, "step": 17250 }, { "epoch": 0.8800285524906949, "grad_norm": 2.3843857621790083, "learning_rate": 3.730551508830965e-06, "loss": 0.8028, "step": 17260 }, { "epoch": 0.8805384183959618, "grad_norm": 3.927491445853449, "learning_rate": 3.6993193361794797e-06, "loss": 0.8858, "step": 17270 }, { "epoch": 0.8810482843012287, "grad_norm": 2.4728935704745054, "learning_rate": 3.668213428634021e-06, "loss": 0.8672, "step": 17280 }, { "epoch": 0.8815581502064956, "grad_norm": 3.4701561651798274, "learning_rate": 3.6372338710225106e-06, "loss": 0.8661, "step": 17290 }, { "epoch": 0.8820680161117626, "grad_norm": 2.7708196871940167, "learning_rate": 3.6063807478283085e-06, "loss": 0.8383, "step": 17300 }, { "epoch": 0.8825778820170295, "grad_norm": 2.121142389184302, "learning_rate": 3.5756541431899994e-06, "loss": 0.8571, "step": 17310 }, { "epoch": 0.8830877479222964, "grad_norm": 2.679428452061065, "learning_rate": 3.545054140901094e-06, "loss": 0.8894, "step": 17320 }, { "epoch": 0.8835976138275633, "grad_norm": 3.627841196640346, "learning_rate": 3.514580824409902e-06, "loss": 0.8473, "step": 17330 }, { "epoch": 0.8841074797328302, "grad_norm": 2.461704176305592, "learning_rate": 3.484234276819226e-06, "loss": 0.8029, "step": 17340 }, { "epoch": 0.8846173456380971, "grad_norm": 2.145063491566008, "learning_rate": 3.4540145808861615e-06, "loss": 0.7239, "step": 17350 }, { "epoch": 0.885127211543364, "grad_norm": 3.181073473816311, "learning_rate": 3.42392181902188e-06, "loss": 0.8498, "step": 17360 }, { "epoch": 0.885637077448631, "grad_norm": 2.0408458344074734, "learning_rate": 3.3939560732913766e-06, "loss": 0.8554, "step": 17370 }, { "epoch": 0.8861469433538979, "grad_norm": 2.5692246053049166, "learning_rate": 3.3641174254133103e-06, "loss": 0.8444, "step": 17380 }, { "epoch": 0.8866568092591648, "grad_norm": 2.491133156069609, "learning_rate": 3.3344059567596676e-06, "loss": 0.8227, "step": 17390 }, { "epoch": 0.8871666751644317, "grad_norm": 3.1954460705488983, "learning_rate": 3.3048217483556744e-06, "loss": 0.8489, "step": 17400 }, { "epoch": 0.8876765410696986, "grad_norm": 3.5272710375611718, "learning_rate": 3.2753648808794503e-06, "loss": 0.8596, "step": 17410 }, { "epoch": 0.8881864069749655, "grad_norm": 4.225068478639933, "learning_rate": 3.2460354346619037e-06, "loss": 0.857, "step": 17420 }, { "epoch": 0.8886962728802325, "grad_norm": 2.8309100959825795, "learning_rate": 3.216833489686416e-06, "loss": 0.888, "step": 17430 }, { "epoch": 0.8892061387854994, "grad_norm": 3.3137678401694326, "learning_rate": 3.1877591255886795e-06, "loss": 0.8399, "step": 17440 }, { "epoch": 0.8897160046907663, "grad_norm": 4.637502639723594, "learning_rate": 3.158812421656465e-06, "loss": 0.8119, "step": 17450 }, { "epoch": 0.8902258705960332, "grad_norm": 2.5633260057758336, 
"learning_rate": 3.1299934568293944e-06, "loss": 0.8342, "step": 17460 }, { "epoch": 0.8907357365013001, "grad_norm": 3.1154186287560752, "learning_rate": 3.1013023096987726e-06, "loss": 0.8631, "step": 17470 }, { "epoch": 0.891245602406567, "grad_norm": 5.142330007176737, "learning_rate": 3.072739058507268e-06, "loss": 0.8792, "step": 17480 }, { "epoch": 0.8917554683118339, "grad_norm": 3.1723051722577713, "learning_rate": 3.044303781148844e-06, "loss": 0.9074, "step": 17490 }, { "epoch": 0.8922653342171009, "grad_norm": 1.9982237903220441, "learning_rate": 3.0159965551684265e-06, "loss": 0.8301, "step": 17500 }, { "epoch": 0.8927752001223678, "grad_norm": 3.126696697645224, "learning_rate": 2.9878174577617436e-06, "loss": 0.8203, "step": 17510 }, { "epoch": 0.8932850660276347, "grad_norm": 3.266740995165527, "learning_rate": 2.9597665657751073e-06, "loss": 0.8285, "step": 17520 }, { "epoch": 0.8937949319329016, "grad_norm": 4.6025351798463845, "learning_rate": 2.931843955705216e-06, "loss": 0.8886, "step": 17530 }, { "epoch": 0.8943047978381685, "grad_norm": 2.5723945485729836, "learning_rate": 2.9040497036989246e-06, "loss": 0.8767, "step": 17540 }, { "epoch": 0.8948146637434354, "grad_norm": 4.222542833788573, "learning_rate": 2.8763838855530414e-06, "loss": 0.8077, "step": 17550 }, { "epoch": 0.8953245296487023, "grad_norm": 2.4980823944930743, "learning_rate": 2.848846576714148e-06, "loss": 0.8911, "step": 17560 }, { "epoch": 0.8958343955539693, "grad_norm": 1.9783994401103546, "learning_rate": 2.8214378522783523e-06, "loss": 0.9195, "step": 17570 }, { "epoch": 0.8963442614592362, "grad_norm": 3.172365700936146, "learning_rate": 2.7941577869911084e-06, "loss": 0.8592, "step": 17580 }, { "epoch": 0.8968541273645031, "grad_norm": 2.8205375859859343, "learning_rate": 2.7670064552470174e-06, "loss": 0.8388, "step": 17590 }, { "epoch": 0.89736399326977, "grad_norm": 4.238387100796564, "learning_rate": 2.7399839310896015e-06, "loss": 0.8409, "step": 17600 }, { "epoch": 0.8978738591750369, "grad_norm": 3.1582031673238418, "learning_rate": 2.7130902882111177e-06, "loss": 0.8413, "step": 17610 }, { "epoch": 0.8983837250803038, "grad_norm": 3.0814373048515025, "learning_rate": 2.6863255999523774e-06, "loss": 0.8555, "step": 17620 }, { "epoch": 0.8988935909855708, "grad_norm": 3.509354259364666, "learning_rate": 2.6596899393024845e-06, "loss": 0.8623, "step": 17630 }, { "epoch": 0.8994034568908377, "grad_norm": 2.429266270195368, "learning_rate": 2.6331833788987126e-06, "loss": 0.8142, "step": 17640 }, { "epoch": 0.8999133227961046, "grad_norm": 3.3980722166350916, "learning_rate": 2.606805991026251e-06, "loss": 0.9006, "step": 17650 }, { "epoch": 0.9004231887013715, "grad_norm": 4.482747939131566, "learning_rate": 2.5805578476180312e-06, "loss": 0.9059, "step": 17660 }, { "epoch": 0.9009330546066384, "grad_norm": 4.9663211271909855, "learning_rate": 2.554439020254529e-06, "loss": 0.9362, "step": 17670 }, { "epoch": 0.9014429205119053, "grad_norm": 4.02891398644143, "learning_rate": 2.52844958016355e-06, "loss": 0.8954, "step": 17680 }, { "epoch": 0.9019527864171722, "grad_norm": 2.4905247597697993, "learning_rate": 2.50258959822009e-06, "loss": 0.8782, "step": 17690 }, { "epoch": 0.9024626523224392, "grad_norm": 5.0959674052387385, "learning_rate": 2.476859144946053e-06, "loss": 0.8809, "step": 17700 }, { "epoch": 0.9029725182277061, "grad_norm": 2.232474885119816, "learning_rate": 2.4512582905101555e-06, "loss": 0.8307, "step": 17710 }, { "epoch": 0.903482384132973, "grad_norm": 
3.202013982667936, "learning_rate": 2.4257871047276504e-06, "loss": 0.8072, "step": 17720 }, { "epoch": 0.9039922500382399, "grad_norm": 2.8089674658641233, "learning_rate": 2.400445657060213e-06, "loss": 0.8563, "step": 17730 }, { "epoch": 0.9045021159435068, "grad_norm": 3.9974832160033276, "learning_rate": 2.3752340166156793e-06, "loss": 0.8396, "step": 17740 }, { "epoch": 0.9050119818487737, "grad_norm": 3.219586185849582, "learning_rate": 2.350152252147919e-06, "loss": 0.8687, "step": 17750 }, { "epoch": 0.9055218477540407, "grad_norm": 2.990664067479295, "learning_rate": 2.3252004320566124e-06, "loss": 0.8481, "step": 17760 }, { "epoch": 0.9060317136593076, "grad_norm": 4.047170027027108, "learning_rate": 2.300378624387051e-06, "loss": 0.8663, "step": 17770 }, { "epoch": 0.9065415795645745, "grad_norm": 2.181344311357337, "learning_rate": 2.2756868968300203e-06, "loss": 0.8759, "step": 17780 }, { "epoch": 0.9070514454698414, "grad_norm": 3.1345299664847044, "learning_rate": 2.251125316721514e-06, "loss": 0.8129, "step": 17790 }, { "epoch": 0.9075613113751083, "grad_norm": 6.306032769349942, "learning_rate": 2.226693951042652e-06, "loss": 0.8502, "step": 17800 }, { "epoch": 0.9080711772803752, "grad_norm": 2.514134962454054, "learning_rate": 2.202392866419423e-06, "loss": 0.801, "step": 17810 }, { "epoch": 0.9085810431856421, "grad_norm": 2.272146093504471, "learning_rate": 2.1782221291225367e-06, "loss": 0.915, "step": 17820 }, { "epoch": 0.9090909090909091, "grad_norm": 2.6031288042931595, "learning_rate": 2.15418180506724e-06, "loss": 0.8785, "step": 17830 }, { "epoch": 0.909600774996176, "grad_norm": 4.832136636171037, "learning_rate": 2.130271959813135e-06, "loss": 0.8516, "step": 17840 }, { "epoch": 0.9101106409014429, "grad_norm": 2.9257845681995605, "learning_rate": 2.1064926585639854e-06, "loss": 0.8414, "step": 17850 }, { "epoch": 0.9106205068067098, "grad_norm": 2.800217543345524, "learning_rate": 2.08284396616758e-06, "loss": 0.8168, "step": 17860 }, { "epoch": 0.9111303727119767, "grad_norm": 3.184719885995561, "learning_rate": 2.0593259471155037e-06, "loss": 0.8713, "step": 17870 }, { "epoch": 0.9116402386172436, "grad_norm": 2.772795236218663, "learning_rate": 2.035938665542991e-06, "loss": 0.8966, "step": 17880 }, { "epoch": 0.9121501045225106, "grad_norm": 3.447137143036548, "learning_rate": 2.012682185228754e-06, "loss": 0.9544, "step": 17890 }, { "epoch": 0.9126599704277775, "grad_norm": 5.177406451744961, "learning_rate": 1.9895565695947915e-06, "loss": 0.9039, "step": 17900 }, { "epoch": 0.9131698363330444, "grad_norm": 3.6842047845018624, "learning_rate": 1.966561881706236e-06, "loss": 0.8765, "step": 17910 }, { "epoch": 0.9136797022383113, "grad_norm": 3.3134450967911846, "learning_rate": 1.943698184271159e-06, "loss": 0.8003, "step": 17920 }, { "epoch": 0.9141895681435782, "grad_norm": 3.5691519411459467, "learning_rate": 1.9209655396404348e-06, "loss": 0.863, "step": 17930 }, { "epoch": 0.9146994340488451, "grad_norm": 2.296992348640956, "learning_rate": 1.8983640098075128e-06, "loss": 0.8445, "step": 17940 }, { "epoch": 0.915209299954112, "grad_norm": 3.111035863780307, "learning_rate": 1.8758936564083118e-06, "loss": 0.8159, "step": 17950 }, { "epoch": 0.915719165859379, "grad_norm": 2.7404784101065705, "learning_rate": 1.8535545407210141e-06, "loss": 0.8563, "step": 17960 }, { "epoch": 0.9162290317646459, "grad_norm": 5.396627788378159, "learning_rate": 1.831346723665911e-06, "loss": 0.8917, "step": 17970 }, { "epoch": 0.9167388976699128, "grad_norm": 
3.332001142997155, "learning_rate": 1.809270265805224e-06, "loss": 0.8533, "step": 17980 }, { "epoch": 0.9172487635751797, "grad_norm": 4.0075701508468216, "learning_rate": 1.7873252273429509e-06, "loss": 0.8424, "step": 17990 }, { "epoch": 0.9177586294804466, "grad_norm": 3.9804673466169542, "learning_rate": 1.7655116681247197e-06, "loss": 0.8345, "step": 18000 }, { "epoch": 0.9182684953857135, "grad_norm": 5.6240808208381825, "learning_rate": 1.7438296476375738e-06, "loss": 0.8592, "step": 18010 }, { "epoch": 0.9187783612909804, "grad_norm": 6.511416754863257, "learning_rate": 1.7222792250098763e-06, "loss": 0.8934, "step": 18020 }, { "epoch": 0.9192882271962474, "grad_norm": 3.818889946851894, "learning_rate": 1.7008604590110776e-06, "loss": 0.8744, "step": 18030 }, { "epoch": 0.9197980931015143, "grad_norm": 4.978118562625245, "learning_rate": 1.6795734080516212e-06, "loss": 0.8693, "step": 18040 }, { "epoch": 0.9203079590067812, "grad_norm": 2.9894812066642387, "learning_rate": 1.658418130182743e-06, "loss": 0.8317, "step": 18050 }, { "epoch": 0.9208178249120481, "grad_norm": 2.9154950321529007, "learning_rate": 1.637394683096316e-06, "loss": 0.9224, "step": 18060 }, { "epoch": 0.921327690817315, "grad_norm": 2.355938059666778, "learning_rate": 1.6165031241247186e-06, "loss": 0.8099, "step": 18070 }, { "epoch": 0.9218375567225819, "grad_norm": 2.1708836279185104, "learning_rate": 1.5957435102406494e-06, "loss": 0.8291, "step": 18080 }, { "epoch": 0.9223474226278489, "grad_norm": 4.69544072022346, "learning_rate": 1.5751158980569948e-06, "loss": 0.8459, "step": 18090 }, { "epoch": 0.9228572885331158, "grad_norm": 3.687481252247572, "learning_rate": 1.554620343826646e-06, "loss": 0.9609, "step": 18100 }, { "epoch": 0.9233671544383827, "grad_norm": 3.800003633358725, "learning_rate": 1.5342569034423826e-06, "loss": 0.8705, "step": 18110 }, { "epoch": 0.9238770203436496, "grad_norm": 3.7227514008001883, "learning_rate": 1.5140256324366885e-06, "loss": 0.8751, "step": 18120 }, { "epoch": 0.9243868862489165, "grad_norm": 2.4800374400454124, "learning_rate": 1.49392658598162e-06, "loss": 0.863, "step": 18130 }, { "epoch": 0.9248967521541834, "grad_norm": 2.1785861029637106, "learning_rate": 1.4739598188886317e-06, "loss": 0.8217, "step": 18140 }, { "epoch": 0.9254066180594503, "grad_norm": 2.6884334032146144, "learning_rate": 1.454125385608468e-06, "loss": 0.8679, "step": 18150 }, { "epoch": 0.9259164839647173, "grad_norm": 4.013025044520481, "learning_rate": 1.4344233402309725e-06, "loss": 0.8446, "step": 18160 }, { "epoch": 0.9264263498699842, "grad_norm": 2.4964188372621234, "learning_rate": 1.4148537364849667e-06, "loss": 0.904, "step": 18170 }, { "epoch": 0.9269362157752511, "grad_norm": 2.6163917793548253, "learning_rate": 1.3954166277380887e-06, "loss": 0.8072, "step": 18180 }, { "epoch": 0.927446081680518, "grad_norm": 3.3507859967207136, "learning_rate": 1.3761120669966543e-06, "loss": 0.9365, "step": 18190 }, { "epoch": 0.9279559475857849, "grad_norm": 2.8148204761347677, "learning_rate": 1.3569401069055243e-06, "loss": 0.8355, "step": 18200 }, { "epoch": 0.9284658134910518, "grad_norm": 4.4364629029646165, "learning_rate": 1.3379007997479265e-06, "loss": 0.8396, "step": 18210 }, { "epoch": 0.9289756793963188, "grad_norm": 4.598709875393512, "learning_rate": 1.31899419744535e-06, "loss": 0.8127, "step": 18220 }, { "epoch": 0.9294855453015857, "grad_norm": 3.008791548355237, "learning_rate": 1.3002203515573797e-06, "loss": 0.837, "step": 18230 }, { "epoch": 0.9299954112068526, 
"grad_norm": 2.4934456906527895, "learning_rate": 1.281579313281589e-06, "loss": 0.845, "step": 18240 }, { "epoch": 0.9305052771121195, "grad_norm": 6.4712387273811665, "learning_rate": 1.2630711334533363e-06, "loss": 0.858, "step": 18250 }, { "epoch": 0.9310151430173864, "grad_norm": 6.810541506555469, "learning_rate": 1.2446958625457028e-06, "loss": 0.8561, "step": 18260 }, { "epoch": 0.9315250089226533, "grad_norm": 3.229393308656394, "learning_rate": 1.2264535506692931e-06, "loss": 0.8296, "step": 18270 }, { "epoch": 0.9320348748279202, "grad_norm": 5.310872254686404, "learning_rate": 1.2083442475721352e-06, "loss": 0.8209, "step": 18280 }, { "epoch": 0.9325447407331872, "grad_norm": 3.6780531426193694, "learning_rate": 1.1903680026395358e-06, "loss": 0.8667, "step": 18290 }, { "epoch": 0.9330546066384541, "grad_norm": 3.550568121391439, "learning_rate": 1.1725248648939257e-06, "loss": 0.9308, "step": 18300 }, { "epoch": 0.933564472543721, "grad_norm": 3.6210320011097017, "learning_rate": 1.1548148829947702e-06, "loss": 0.8885, "step": 18310 }, { "epoch": 0.9340743384489879, "grad_norm": 2.2545294133233917, "learning_rate": 1.1372381052383806e-06, "loss": 0.8348, "step": 18320 }, { "epoch": 0.9345842043542548, "grad_norm": 5.502375411852244, "learning_rate": 1.1197945795578368e-06, "loss": 0.9413, "step": 18330 }, { "epoch": 0.9350940702595217, "grad_norm": 3.2828861888220544, "learning_rate": 1.1024843535228092e-06, "loss": 0.8252, "step": 18340 }, { "epoch": 0.9356039361647887, "grad_norm": 3.354389645771977, "learning_rate": 1.0853074743394754e-06, "loss": 0.7994, "step": 18350 }, { "epoch": 0.9361138020700556, "grad_norm": 2.4938196469945697, "learning_rate": 1.0682639888503432e-06, "loss": 0.9092, "step": 18360 }, { "epoch": 0.9366236679753225, "grad_norm": 2.5970589468350007, "learning_rate": 1.0513539435341613e-06, "loss": 0.8096, "step": 18370 }, { "epoch": 0.9371335338805894, "grad_norm": 3.7846310218382433, "learning_rate": 1.0345773845057694e-06, "loss": 0.893, "step": 18380 }, { "epoch": 0.9376433997858563, "grad_norm": 3.9466233340933443, "learning_rate": 1.017934357515987e-06, "loss": 0.7901, "step": 18390 }, { "epoch": 0.9381532656911232, "grad_norm": 4.126880922970423, "learning_rate": 1.001424907951487e-06, "loss": 0.8716, "step": 18400 }, { "epoch": 0.9386631315963901, "grad_norm": 5.323546757862889, "learning_rate": 9.850490808346547e-07, "loss": 0.898, "step": 18410 }, { "epoch": 0.9391729975016571, "grad_norm": 3.7539835289368044, "learning_rate": 9.68806920823484e-07, "loss": 0.9291, "step": 18420 }, { "epoch": 0.939682863406924, "grad_norm": 2.6787913001334136, "learning_rate": 9.526984722114552e-07, "loss": 0.8507, "step": 18430 }, { "epoch": 0.9401927293121909, "grad_norm": 4.331291924536329, "learning_rate": 9.367237789274063e-07, "loss": 0.8842, "step": 18440 }, { "epoch": 0.9407025952174578, "grad_norm": 2.8944294666822934, "learning_rate": 9.208828845354012e-07, "loss": 0.8369, "step": 18450 }, { "epoch": 0.9412124611227247, "grad_norm": 4.80492667919156, "learning_rate": 9.051758322346449e-07, "loss": 0.8094, "step": 18460 }, { "epoch": 0.9417223270279916, "grad_norm": 3.0192759369305078, "learning_rate": 8.896026648593292e-07, "loss": 0.8277, "step": 18470 }, { "epoch": 0.9422321929332585, "grad_norm": 2.647403491113819, "learning_rate": 8.741634248785547e-07, "loss": 0.8789, "step": 18480 }, { "epoch": 0.9427420588385255, "grad_norm": 2.6179013595583815, "learning_rate": 8.588581543961694e-07, "loss": 0.9285, "step": 18490 }, { "epoch": 
0.9432519247437924, "grad_norm": 2.9221622793565207, "learning_rate": 8.436868951506916e-07, "loss": 0.8862, "step": 18500 }, { "epoch": 0.9437617906490593, "grad_norm": 5.52126521135154, "learning_rate": 8.286496885151762e-07, "loss": 0.882, "step": 18510 }, { "epoch": 0.9442716565543262, "grad_norm": 2.612025259812181, "learning_rate": 8.137465754971041e-07, "loss": 0.8955, "step": 18520 }, { "epoch": 0.9447815224595931, "grad_norm": 2.405972712557117, "learning_rate": 7.989775967382873e-07, "loss": 0.847, "step": 18530 }, { "epoch": 0.94529138836486, "grad_norm": 2.5901067144410295, "learning_rate": 7.84342792514725e-07, "loss": 0.8647, "step": 18540 }, { "epoch": 0.945801254270127, "grad_norm": 3.606420599385904, "learning_rate": 7.698422027365315e-07, "loss": 0.8391, "step": 18550 }, { "epoch": 0.9463111201753939, "grad_norm": 3.95732009618789, "learning_rate": 7.554758669477969e-07, "loss": 0.9192, "step": 18560 }, { "epoch": 0.9468209860806608, "grad_norm": 2.899827912797022, "learning_rate": 7.41243824326504e-07, "loss": 0.8217, "step": 18570 }, { "epoch": 0.9473308519859277, "grad_norm": 5.517485836932538, "learning_rate": 7.271461136843904e-07, "loss": 0.8674, "step": 18580 }, { "epoch": 0.9478407178911946, "grad_norm": 3.495295917773964, "learning_rate": 7.131827734668805e-07, "loss": 0.8096, "step": 18590 }, { "epoch": 0.9483505837964615, "grad_norm": 8.187709950219462, "learning_rate": 6.993538417529533e-07, "loss": 0.8767, "step": 18600 }, { "epoch": 0.9488604497017284, "grad_norm": 2.8840730999518667, "learning_rate": 6.856593562550362e-07, "loss": 0.892, "step": 18610 }, { "epoch": 0.9493703156069954, "grad_norm": 2.5226780687684403, "learning_rate": 6.720993543189391e-07, "loss": 0.8174, "step": 18620 }, { "epoch": 0.9498801815122623, "grad_norm": 2.951515420506232, "learning_rate": 6.58673872923693e-07, "loss": 0.9074, "step": 18630 }, { "epoch": 0.9503900474175292, "grad_norm": 4.792672983968864, "learning_rate": 6.45382948681511e-07, "loss": 0.7992, "step": 18640 }, { "epoch": 0.9508999133227961, "grad_norm": 3.3172076323467623, "learning_rate": 6.32226617837639e-07, "loss": 0.8372, "step": 18650 }, { "epoch": 0.951409779228063, "grad_norm": 4.892347892210914, "learning_rate": 6.19204916270294e-07, "loss": 0.9272, "step": 18660 }, { "epoch": 0.9519196451333299, "grad_norm": 2.7132416386861546, "learning_rate": 6.063178794905367e-07, "loss": 0.9031, "step": 18670 }, { "epoch": 0.9524295110385969, "grad_norm": 2.2206872979539916, "learning_rate": 5.935655426421993e-07, "loss": 0.8637, "step": 18680 }, { "epoch": 0.9529393769438638, "grad_norm": 2.5469356130158562, "learning_rate": 5.809479405017638e-07, "loss": 0.8436, "step": 18690 }, { "epoch": 0.9534492428491307, "grad_norm": 4.463210714047275, "learning_rate": 5.684651074782887e-07, "loss": 0.8379, "step": 18700 }, { "epoch": 0.9539591087543976, "grad_norm": 2.8880619765761852, "learning_rate": 5.561170776133162e-07, "loss": 0.8726, "step": 18710 }, { "epoch": 0.9544689746596645, "grad_norm": 3.0248441312783063, "learning_rate": 5.439038845807654e-07, "loss": 0.8712, "step": 18720 }, { "epoch": 0.9549788405649314, "grad_norm": 2.334921985703277, "learning_rate": 5.318255616868384e-07, "loss": 0.8826, "step": 18730 }, { "epoch": 0.9554887064701983, "grad_norm": 2.443863308599239, "learning_rate": 5.198821418699485e-07, "loss": 0.8048, "step": 18740 }, { "epoch": 0.9559985723754653, "grad_norm": 3.832459608362289, "learning_rate": 5.080736577006085e-07, "loss": 0.8486, "step": 18750 }, { "epoch": 0.9565084382807322, 
"grad_norm": 2.9960659443060105, "learning_rate": 4.96400141381359e-07, "loss": 0.8473, "step": 18760 }, { "epoch": 0.9570183041859991, "grad_norm": 3.721754717411947, "learning_rate": 4.848616247466742e-07, "loss": 0.9229, "step": 18770 }, { "epoch": 0.957528170091266, "grad_norm": 5.15130782050773, "learning_rate": 4.734581392628723e-07, "loss": 0.8467, "step": 18780 }, { "epoch": 0.9580380359965329, "grad_norm": 2.7456251498558526, "learning_rate": 4.6218971602804416e-07, "loss": 0.8612, "step": 18790 }, { "epoch": 0.9585479019017998, "grad_norm": 3.8548883471679636, "learning_rate": 4.5105638577193075e-07, "loss": 0.8309, "step": 18800 }, { "epoch": 0.9590577678070668, "grad_norm": 4.873377567827376, "learning_rate": 4.4005817885588994e-07, "loss": 0.8623, "step": 18810 }, { "epoch": 0.9595676337123337, "grad_norm": 3.6238646663565297, "learning_rate": 4.291951252727855e-07, "loss": 0.8575, "step": 18820 }, { "epoch": 0.9600774996176006, "grad_norm": 3.798716392866742, "learning_rate": 4.1846725464688706e-07, "loss": 0.8485, "step": 18830 }, { "epoch": 0.9605873655228675, "grad_norm": 3.7487654835230075, "learning_rate": 4.078745962338482e-07, "loss": 0.8878, "step": 18840 }, { "epoch": 0.9610972314281344, "grad_norm": 3.340578816783333, "learning_rate": 3.9741717892055054e-07, "loss": 0.8919, "step": 18850 }, { "epoch": 0.9616070973334013, "grad_norm": 2.995233367558827, "learning_rate": 3.8709503122509873e-07, "loss": 0.8168, "step": 18860 }, { "epoch": 0.9621169632386682, "grad_norm": 2.3192678470510044, "learning_rate": 3.769081812966757e-07, "loss": 0.8696, "step": 18870 }, { "epoch": 0.9626268291439352, "grad_norm": 2.1744727704573945, "learning_rate": 3.6685665691552073e-07, "loss": 0.8569, "step": 18880 }, { "epoch": 0.9631366950492021, "grad_norm": 3.7160267401899008, "learning_rate": 3.569404854928293e-07, "loss": 0.8157, "step": 18890 }, { "epoch": 0.963646560954469, "grad_norm": 4.630686609087495, "learning_rate": 3.4715969407067563e-07, "loss": 0.8211, "step": 18900 }, { "epoch": 0.9641564268597359, "grad_norm": 3.106827725800854, "learning_rate": 3.3751430932193464e-07, "loss": 0.8574, "step": 18910 }, { "epoch": 0.9646662927650028, "grad_norm": 2.2102336485159113, "learning_rate": 3.280043575502323e-07, "loss": 0.8193, "step": 18920 }, { "epoch": 0.9651761586702697, "grad_norm": 5.731003156630731, "learning_rate": 3.186298646898567e-07, "loss": 0.9305, "step": 18930 }, { "epoch": 0.9656860245755367, "grad_norm": 2.753986389365798, "learning_rate": 3.0939085630568575e-07, "loss": 0.8759, "step": 18940 }, { "epoch": 0.9661958904808036, "grad_norm": 3.6225098005096625, "learning_rate": 3.0028735759311533e-07, "loss": 0.879, "step": 18950 }, { "epoch": 0.9667057563860705, "grad_norm": 3.630026612949022, "learning_rate": 2.9131939337802007e-07, "loss": 0.8184, "step": 18960 }, { "epoch": 0.9672156222913374, "grad_norm": 2.528698326586515, "learning_rate": 2.8248698811664254e-07, "loss": 0.8175, "step": 18970 }, { "epoch": 0.9677254881966043, "grad_norm": 3.6075225689470747, "learning_rate": 2.7379016589554886e-07, "loss": 0.8528, "step": 18980 }, { "epoch": 0.9682353541018712, "grad_norm": 3.8572349586637698, "learning_rate": 2.6522895043156747e-07, "loss": 0.8989, "step": 18990 }, { "epoch": 0.9687452200071381, "grad_norm": 3.905290595313625, "learning_rate": 2.5680336507171166e-07, "loss": 0.8847, "step": 19000 }, { "epoch": 0.9692550859124051, "grad_norm": 1.833745222898789, "learning_rate": 2.4851343279313487e-07, "loss": 0.8051, "step": 19010 }, { "epoch": 
0.969764951817672, "grad_norm": 2.7435237091518254, "learning_rate": 2.403591762030366e-07, "loss": 0.9036, "step": 19020 }, { "epoch": 0.9702748177229389, "grad_norm": 2.2270515613751094, "learning_rate": 2.3234061753864e-07, "loss": 0.824, "step": 19030 }, { "epoch": 0.9707846836282058, "grad_norm": 5.766577828637446, "learning_rate": 2.2445777866709205e-07, "loss": 0.8162, "step": 19040 }, { "epoch": 0.9712945495334727, "grad_norm": 2.8523883146831492, "learning_rate": 2.1671068108543026e-07, "loss": 0.8187, "step": 19050 }, { "epoch": 0.9718044154387396, "grad_norm": 2.8321644412768308, "learning_rate": 2.090993459205215e-07, "loss": 0.9254, "step": 19060 }, { "epoch": 0.9723142813440065, "grad_norm": 3.219091604605024, "learning_rate": 2.0162379392899556e-07, "loss": 0.9144, "step": 19070 }, { "epoch": 0.9728241472492735, "grad_norm": 2.6000113810442222, "learning_rate": 1.9428404549718393e-07, "loss": 0.7943, "step": 19080 }, { "epoch": 0.9733340131545404, "grad_norm": 3.4986370845152983, "learning_rate": 1.8708012064108106e-07, "loss": 0.7995, "step": 19090 }, { "epoch": 0.9738438790598073, "grad_norm": 3.467721195824343, "learning_rate": 1.800120390062887e-07, "loss": 0.9503, "step": 19100 }, { "epoch": 0.9743537449650742, "grad_norm": 2.5588487892459004, "learning_rate": 1.7307981986793843e-07, "loss": 0.7897, "step": 19110 }, { "epoch": 0.9748636108703411, "grad_norm": 2.8331779889322033, "learning_rate": 1.6628348213066913e-07, "loss": 0.8137, "step": 19120 }, { "epoch": 0.975373476775608, "grad_norm": 3.7365341468756914, "learning_rate": 1.5962304432854957e-07, "loss": 0.8225, "step": 19130 }, { "epoch": 0.975883342680875, "grad_norm": 2.0910251675107805, "learning_rate": 1.5309852462504492e-07, "loss": 0.8462, "step": 19140 }, { "epoch": 0.9763932085861419, "grad_norm": 4.446574867132785, "learning_rate": 1.4670994081297795e-07, "loss": 0.8717, "step": 19150 }, { "epoch": 0.9769030744914088, "grad_norm": 2.760631269253728, "learning_rate": 1.4045731031444021e-07, "loss": 0.8413, "step": 19160 }, { "epoch": 0.9774129403966757, "grad_norm": 4.015189432538389, "learning_rate": 1.343406501807809e-07, "loss": 0.9286, "step": 19170 }, { "epoch": 0.9779228063019426, "grad_norm": 3.306857253735354, "learning_rate": 1.2835997709255144e-07, "loss": 0.7985, "step": 19180 }, { "epoch": 0.9784326722072095, "grad_norm": 4.416507109910236, "learning_rate": 1.2251530735944982e-07, "loss": 0.8326, "step": 19190 }, { "epoch": 0.9789425381124764, "grad_norm": 2.5764728953582834, "learning_rate": 1.1680665692029302e-07, "loss": 0.861, "step": 19200 }, { "epoch": 0.9794524040177434, "grad_norm": 3.090904035193718, "learning_rate": 1.1123404134296134e-07, "loss": 0.923, "step": 19210 }, { "epoch": 0.9799622699230103, "grad_norm": 3.2227974534815096, "learning_rate": 1.0579747582434851e-07, "loss": 0.847, "step": 19220 }, { "epoch": 0.9804721358282772, "grad_norm": 2.2911374257791897, "learning_rate": 1.0049697519034506e-07, "loss": 0.8207, "step": 19230 }, { "epoch": 0.9809820017335441, "grad_norm": 3.3046177567956527, "learning_rate": 9.533255389577166e-08, "loss": 0.9242, "step": 19240 }, { "epoch": 0.981491867638811, "grad_norm": 5.309897401920478, "learning_rate": 9.03042260243625e-08, "loss": 0.8945, "step": 19250 }, { "epoch": 0.9820017335440779, "grad_norm": 3.11514881873959, "learning_rate": 8.54120052887042e-08, "loss": 0.8484, "step": 19260 }, { "epoch": 0.9825115994493449, "grad_norm": 2.8082219376350217, "learning_rate": 8.065590503021914e-08, "loss": 0.8642, "step": 19270 }, { "epoch": 
0.9830214653546118, "grad_norm": 2.881519832008848, "learning_rate": 7.603593821911558e-08, "loss": 0.8065, "step": 19280 }, { "epoch": 0.9835313312598787, "grad_norm": 3.5042668993587323, "learning_rate": 7.15521174543543e-08, "loss": 0.8405, "step": 19290 }, { "epoch": 0.9840411971651456, "grad_norm": 3.1640962134834925, "learning_rate": 6.720445496362083e-08, "loss": 0.8362, "step": 19300 }, { "epoch": 0.9845510630704125, "grad_norm": 2.566985652198935, "learning_rate": 6.299296260329213e-08, "loss": 0.8704, "step": 19310 }, { "epoch": 0.9850609289756794, "grad_norm": 3.5305759962119794, "learning_rate": 5.891765185838671e-08, "loss": 0.8879, "step": 19320 }, { "epoch": 0.9855707948809463, "grad_norm": 4.46440308548654, "learning_rate": 5.4978533842559024e-08, "loss": 0.9011, "step": 19330 }, { "epoch": 0.9860806607862133, "grad_norm": 3.6527246281393744, "learning_rate": 5.117561929804948e-08, "loss": 0.8788, "step": 19340 }, { "epoch": 0.9865905266914802, "grad_norm": 2.8121298555457384, "learning_rate": 4.750891859566786e-08, "loss": 0.8074, "step": 19350 }, { "epoch": 0.9871003925967471, "grad_norm": 3.027267677246048, "learning_rate": 4.3978441734754406e-08, "loss": 0.8067, "step": 19360 }, { "epoch": 0.987610258502014, "grad_norm": 3.7687357203557963, "learning_rate": 4.058419834315763e-08, "loss": 0.8542, "step": 19370 }, { "epoch": 0.9881201244072809, "grad_norm": 3.235570082770969, "learning_rate": 3.732619767720657e-08, "loss": 0.8359, "step": 19380 }, { "epoch": 0.9886299903125478, "grad_norm": 2.787863850157136, "learning_rate": 3.420444862170524e-08, "loss": 0.8792, "step": 19390 }, { "epoch": 0.9891398562178148, "grad_norm": 4.3389220297390585, "learning_rate": 3.121895968986044e-08, "loss": 0.9229, "step": 19400 }, { "epoch": 0.9896497221230817, "grad_norm": 3.668799111181342, "learning_rate": 2.836973902331508e-08, "loss": 0.8328, "step": 19410 }, { "epoch": 0.9901595880283486, "grad_norm": 2.861609318195797, "learning_rate": 2.5656794392076023e-08, "loss": 0.8798, "step": 19420 }, { "epoch": 0.9906694539336155, "grad_norm": 3.661523248549651, "learning_rate": 2.308013319454183e-08, "loss": 0.8445, "step": 19430 }, { "epoch": 0.9911793198388824, "grad_norm": 3.0753511696609053, "learning_rate": 2.06397624574306e-08, "loss": 0.8954, "step": 19440 }, { "epoch": 0.9916891857441493, "grad_norm": 4.111203142330193, "learning_rate": 1.8335688835802167e-08, "loss": 0.8729, "step": 19450 }, { "epoch": 0.9921990516494162, "grad_norm": 4.754379478437848, "learning_rate": 1.6167918613024803e-08, "loss": 0.8961, "step": 19460 }, { "epoch": 0.9927089175546832, "grad_norm": 2.2129405900678845, "learning_rate": 1.4136457700758555e-08, "loss": 0.8235, "step": 19470 }, { "epoch": 0.9932187834599501, "grad_norm": 4.8694078385415756, "learning_rate": 1.2241311638927499e-08, "loss": 0.8728, "step": 19480 }, { "epoch": 0.993728649365217, "grad_norm": 2.6242569895850663, "learning_rate": 1.048248559573084e-08, "loss": 0.8355, "step": 19490 }, { "epoch": 0.9942385152704839, "grad_norm": 2.8544971397067735, "learning_rate": 8.859984367598495e-09, "loss": 0.9015, "step": 19500 }, { "epoch": 0.9947483811757508, "grad_norm": 4.164743889376812, "learning_rate": 7.373812379213307e-09, "loss": 0.8095, "step": 19510 }, { "epoch": 0.9952582470810177, "grad_norm": 3.9843641629018656, "learning_rate": 6.0239736834555306e-09, "loss": 0.8776, "step": 19520 }, { "epoch": 0.9957681129862846, "grad_norm": 2.191261683224035, "learning_rate": 4.810471961436136e-09, "loss": 0.8579, "step": 19530 }, { "epoch": 
0.9962779788915516, "grad_norm": 4.4430787556674884, "learning_rate": 3.733310522452405e-09, "loss": 0.9139, "step": 19540 }, { "epoch": 0.9967878447968185, "grad_norm": 3.693711093390687, "learning_rate": 2.792492303999028e-09, "loss": 0.8384, "step": 19550 }, { "epoch": 0.9972977107020854, "grad_norm": 5.494267940102428, "learning_rate": 1.9880198717514565e-09, "loss": 0.8998, "step": 19560 }, { "epoch": 0.9978075766073523, "grad_norm": 3.2250246338769535, "learning_rate": 1.3198954195603463e-09, "loss": 0.9214, "step": 19570 }, { "epoch": 0.9983174425126192, "grad_norm": 3.583682118740531, "learning_rate": 7.881207694404591e-10, "loss": 0.8217, "step": 19580 }, { "epoch": 0.9988273084178861, "grad_norm": 2.9171448962641486, "learning_rate": 3.926973715873139e-10, "loss": 0.8511, "step": 19590 }, { "epoch": 0.999337174323153, "grad_norm": 3.130566308945091, "learning_rate": 1.3362630433833013e-10, "loss": 0.8635, "step": 19600 }, { "epoch": 0.99984704022842, "grad_norm": 3.803166578711262, "learning_rate": 1.090827420613394e-11, "loss": 0.8414, "step": 19610 }, { "epoch": 1.0, "step": 19613, "total_flos": 1.5178649017778176e+16, "train_loss": 1.0492073169314005, "train_runtime": 53079.7491, "train_samples_per_second": 23.647, "train_steps_per_second": 0.37 } ], "logging_steps": 10, "max_steps": 19613, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 1000, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 1.5178649017778176e+16, "train_batch_size": 1, "trial_name": null, "trial_params": null }