{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.997686969930609,
  "eval_steps": 500,
  "global_step": 1296,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0015420200462606013, "grad_norm": 0.06960189342498779, "learning_rate": 1e-05, "loss": 0.9546, "step": 1 },
    { "epoch": 0.0030840400925212026, "grad_norm": 0.07705054432153702, "learning_rate": 2e-05, "loss": 1.0023, "step": 2 },
    { "epoch": 0.004626060138781804, "grad_norm": 0.07115544378757477, "learning_rate": 3e-05, "loss": 0.9733, "step": 3 },
    { "epoch": 0.006168080185042405, "grad_norm": 0.06770255416631699, "learning_rate": 4e-05, "loss": 0.8908, "step": 4 },
    { "epoch": 0.007710100231303007, "grad_norm": 0.06454406678676605, "learning_rate": 5e-05, "loss": 0.9168, "step": 5 },
    { "epoch": 0.009252120277563608, "grad_norm": 0.0818350687623024, "learning_rate": 6e-05, "loss": 0.8822, "step": 6 },
    { "epoch": 0.01079414032382421, "grad_norm": 0.08504347503185272, "learning_rate": 7e-05, "loss": 0.9563, "step": 7 },
    { "epoch": 0.01233616037008481, "grad_norm": 0.08642569184303284, "learning_rate": 8e-05, "loss": 0.9324, "step": 8 },
    { "epoch": 0.013878180416345412, "grad_norm": 0.07200746238231659, "learning_rate": 9e-05, "loss": 0.8557, "step": 9 },
    { "epoch": 0.015420200462606014, "grad_norm": 0.0539001002907753, "learning_rate": 0.0001, "loss": 0.8316, "step": 10 },
    { "epoch": 0.016962220508866616, "grad_norm": 0.05532313138246536, "learning_rate": 0.00011000000000000002, "loss": 0.869, "step": 11 },
    { "epoch": 0.018504240555127217, "grad_norm": 0.04828835651278496, "learning_rate": 0.00012, "loss": 0.9004, "step": 12 },
    { "epoch": 0.020046260601387818, "grad_norm": 0.051899779587984085, "learning_rate": 0.00013000000000000002, "loss": 0.7818, "step": 13 },
    { "epoch": 0.02158828064764842, "grad_norm": 0.07032614946365356, "learning_rate": 0.00014, "loss": 0.7946, "step": 14 },
    { "epoch": 0.02313030069390902, "grad_norm": 0.08230099081993103, "learning_rate": 0.00015000000000000001, "loss": 0.934, "step": 15 },
    { "epoch": 0.02467232074016962, "grad_norm": 0.08811169862747192, "learning_rate": 0.00016, "loss": 0.8621, "step": 16 },
    { "epoch": 0.026214340786430222, "grad_norm": 0.062236446887254715, "learning_rate": 0.00017, "loss": 0.8186, "step": 17 },
    { "epoch": 0.027756360832690823, "grad_norm": 0.052294306457042694, "learning_rate": 0.00018, "loss": 0.8124, "step": 18 },
    { "epoch": 0.029298380878951428, "grad_norm": 0.032340776175260544, "learning_rate": 0.00019, "loss": 0.8598, "step": 19 },
    { "epoch": 0.03084040092521203, "grad_norm": 0.028073711320757866, "learning_rate": 0.0002, "loss": 0.9087, "step": 20 },
    { "epoch": 0.03238242097147263, "grad_norm": 0.027820633724331856, "learning_rate": 0.00019999969691239107, "loss": 0.7093, "step": 21 },
    { "epoch": 0.03392444101773323, "grad_norm": 0.032430652529001236, "learning_rate": 0.00019999878765140146, "loss": 0.7825, "step": 22 },
    { "epoch": 0.03546646106399383, "grad_norm": 0.034685954451560974, "learning_rate": 0.000199997272222543, "loss": 0.7787, "step": 23 },
    { "epoch": 0.03700848111025443, "grad_norm": 0.033446088433265686, "learning_rate": 0.0001999951506350017, "loss": 0.8636, "step": 24 },
    { "epoch": 0.03855050115651504, "grad_norm": 0.026805371046066284, "learning_rate": 0.0001999924229016382, "loss": 0.904, "step": 25 },
    { "epoch": 0.040092521202775636, "grad_norm": 0.024178853258490562, "learning_rate": 0.0001999890890389873, "loss": 0.7428, "step": 26 },
    { "epoch": 0.04163454124903624, "grad_norm": 0.024074744433164597, "learning_rate": 0.00019998514906725804, "loss": 0.8562, "step": 27 },
    { "epoch": 0.04317656129529684, "grad_norm": 0.025082379579544067, "learning_rate": 0.0001999806030103336, "loss": 0.8078, "step": 28 },
    { "epoch": 0.04471858134155744, "grad_norm": 0.027726231142878532, "learning_rate": 0.00019997545089577102, "loss": 0.7133, "step": 29 },
    { "epoch": 0.04626060138781804, "grad_norm": 0.02736576274037361, "learning_rate": 0.00019996969275480115, "loss": 0.7826, "step": 30 },
    { "epoch": 0.047802621434078645, "grad_norm": 0.0234544537961483, "learning_rate": 0.0001999633286223284, "loss": 0.7424, "step": 31 },
    { "epoch": 0.04934464148033924, "grad_norm": 0.025730784982442856, "learning_rate": 0.00019995635853693058, "loss": 0.7584, "step": 32 },
    { "epoch": 0.05088666152659985, "grad_norm": 0.02729860506951809, "learning_rate": 0.00019994878254085861, "loss": 0.73, "step": 33 },
    { "epoch": 0.052428681572860444, "grad_norm": 0.027428491041064262, "learning_rate": 0.00019994060068003627, "loss": 0.854, "step": 34 },
    { "epoch": 0.05397070161912105, "grad_norm": 0.024362564086914062, "learning_rate": 0.00019993181300406005, "loss": 0.7441, "step": 35 },
    { "epoch": 0.05551272166538165, "grad_norm": 0.024825185537338257, "learning_rate": 0.00019992241956619863, "loss": 0.7367, "step": 36 },
    { "epoch": 0.05705474171164225, "grad_norm": 0.023908289149403572, "learning_rate": 0.00019991242042339264, "loss": 0.8266, "step": 37 },
    { "epoch": 0.058596761757902856, "grad_norm": 0.023762725293636322, "learning_rate": 0.00019990181563625447, "loss": 0.829, "step": 38 },
    { "epoch": 0.06013878180416345, "grad_norm": 0.02654297836124897, "learning_rate": 0.0001998906052690677, "loss": 0.7635, "step": 39 },
    { "epoch": 0.06168080185042406, "grad_norm": 0.026318082585930824, "learning_rate": 0.00019987878938978684, "loss": 0.7623, "step": 40 },
    { "epoch": 0.06322282189668466, "grad_norm": 0.024309197440743446, "learning_rate": 0.00019986636807003673, "loss": 0.7494, "step": 41 },
    { "epoch": 0.06476484194294525, "grad_norm": 0.022980719804763794, "learning_rate": 0.00019985334138511237, "loss": 0.748, "step": 42 },
    { "epoch": 0.06630686198920586, "grad_norm": 0.023796193301677704, "learning_rate": 0.00019983970941397835, "loss": 0.8684, "step": 43 },
    { "epoch": 0.06784888203546646, "grad_norm": 0.02435590885579586, "learning_rate": 0.00019982547223926824, "loss": 0.6749, "step": 44 },
    { "epoch": 0.06939090208172706, "grad_norm": 0.026482658460736275, "learning_rate": 0.0001998106299472843, "loss": 0.7684, "step": 45 },
    { "epoch": 0.07093292212798766, "grad_norm": 0.025558117777109146, "learning_rate": 0.0001997951826279968, "loss": 0.7507, "step": 46 },
    { "epoch": 0.07247494217424827, "grad_norm": 0.027371902018785477, "learning_rate": 0.00019977913037504355, "loss": 0.7377, "step": 47 },
    { "epoch": 0.07401696222050887, "grad_norm": 0.0234097708016634, "learning_rate": 0.00019976247328572938, "loss": 0.7675, "step": 48 },
    { "epoch": 0.07555898226676946, "grad_norm": 0.0241215992718935, "learning_rate": 0.00019974521146102537, "loss": 0.8079, "step": 49 },
    { "epoch": 0.07710100231303008, "grad_norm": 0.025045258924365044, "learning_rate": 0.00019972734500556846, "loss": 0.8407, "step": 50 },
    { "epoch": 0.07864302235929067, "grad_norm": 0.02574036829173565, "learning_rate": 0.0001997088740276607, "loss": 0.767, "step": 51 },
    { "epoch": 0.08018504240555127, "grad_norm": 0.02342085726559162, "learning_rate": 0.00019968979863926856, "loss": 0.7373, "step": 52 },
    { "epoch": 0.08172706245181187, "grad_norm": 0.023467406630516052, "learning_rate": 0.0001996701189560223, "loss": 0.6529, "step": 53 },
    { "epoch": 0.08326908249807248, "grad_norm": 0.02330499142408371, "learning_rate": 0.00019964983509721527, "loss": 0.7347, "step": 54 },
    { "epoch": 0.08481110254433308, "grad_norm": 0.027940964326262474, "learning_rate": 0.00019962894718580324, "loss": 0.9313, "step": 55 },
    { "epoch": 0.08635312259059368, "grad_norm": 0.024525761604309082, "learning_rate": 0.00019960745534840354, "loss": 0.767, "step": 56 },
    { "epoch": 0.08789514263685427, "grad_norm": 0.024538526311516762, "learning_rate": 0.00019958535971529434, "loss": 0.7659, "step": 57 },
    { "epoch": 0.08943716268311488, "grad_norm": 0.02253701537847519, "learning_rate": 0.00019956266042041394, "loss": 0.7805, "step": 58 },
    { "epoch": 0.09097918272937548, "grad_norm": 0.023676637560129166, "learning_rate": 0.0001995393576013598, "loss": 0.7894, "step": 59 },
    { "epoch": 0.09252120277563608, "grad_norm": 0.022134529426693916, "learning_rate": 0.0001995154513993878, "loss": 0.7492, "step": 60 },
    { "epoch": 0.09406322282189669, "grad_norm": 0.02350509911775589, "learning_rate": 0.00019949094195941152, "loss": 0.6902, "step": 61 },
    { "epoch": 0.09560524286815729, "grad_norm": 0.02464171312749386, "learning_rate": 0.00019946582943000102, "loss": 0.7836, "step": 62 },
    { "epoch": 0.09714726291441789, "grad_norm": 0.023095758631825447, "learning_rate": 0.00019944011396338222, "loss": 0.8321, "step": 63 },
    { "epoch": 0.09868928296067848, "grad_norm": 0.026240425184369087, "learning_rate": 0.00019941379571543596, "loss": 0.8461, "step": 64 },
    { "epoch": 0.1002313030069391, "grad_norm": 0.02174345962703228, "learning_rate": 0.00019938687484569693, "loss": 0.6388, "step": 65 },
    { "epoch": 0.1017733230531997, "grad_norm": 0.02867325395345688, "learning_rate": 0.00019935935151735277, "loss": 0.826, "step": 66 },
    { "epoch": 0.10331534309946029, "grad_norm": 0.02631618268787861, "learning_rate": 0.00019933122589724302, "loss": 0.9407, "step": 67 },
    { "epoch": 0.10485736314572089, "grad_norm": 0.022020747885107994, "learning_rate": 0.0001993024981558583, "loss": 0.7004, "step": 68 },
    { "epoch": 0.1063993831919815, "grad_norm": 0.02743780054152012, "learning_rate": 0.000199273168467339, "loss": 0.7607, "step": 69 },
    { "epoch": 0.1079414032382421, "grad_norm": 0.028378015384078026, "learning_rate": 0.00019924323700947448, "loss": 0.8604, "step": 70 },
    { "epoch": 0.1094834232845027, "grad_norm": 0.0275627039372921, "learning_rate": 0.00019921270396370172, "loss": 0.835, "step": 71 },
    { "epoch": 0.1110254433307633, "grad_norm": 0.023099975660443306, "learning_rate": 0.0001991815695151046, "loss": 0.7028, "step": 72 },
    { "epoch": 0.1125674633770239, "grad_norm": 0.028545403853058815, "learning_rate": 0.00019914983385241236, "loss": 0.8248, "step": 73 },
    { "epoch": 0.1141094834232845, "grad_norm": 0.02746577188372612, "learning_rate": 0.00019911749716799873, "loss": 0.7309, "step": 74 },
    { "epoch": 0.1156515034695451, "grad_norm": 0.024899670854210854, "learning_rate": 0.00019908455965788067, "loss": 0.7473, "step": 75 },
    { "epoch": 0.11719352351580571, "grad_norm": 0.024973087012767792, "learning_rate": 0.00019905102152171727, "loss": 0.8362, "step": 76 },
    { "epoch": 0.11873554356206631, "grad_norm": 0.023668723180890083, "learning_rate": 0.0001990168829628083, "loss": 0.7677, "step": 77 },
    { "epoch": 0.1202775636083269, "grad_norm": 0.02495860867202282, "learning_rate": 0.0001989821441880933, "loss": 0.7341, "step": 78 },
    { "epoch": 0.1218195836545875, "grad_norm": 0.02537156455218792, "learning_rate": 0.00019894680540815006, "loss": 0.6767, "step": 79 },
    { "epoch": 0.12336160370084812, "grad_norm": 0.0246786717325449, "learning_rate": 0.0001989108668371936, "loss": 0.7959, "step": 80 },
    { "epoch": 0.12490362374710871, "grad_norm": 0.02471376582980156, "learning_rate": 0.00019887432869307458, "loss": 0.6787, "step": 81 },
    { "epoch": 0.1264456437933693, "grad_norm": 0.025275586172938347, "learning_rate": 0.00019883719119727816, "loss": 0.7753, "step": 82 },
    { "epoch": 0.12798766383962992, "grad_norm": 0.021094506606459618, "learning_rate": 0.00019879945457492267, "loss": 0.758, "step": 83 },
    { "epoch": 0.1295296838858905, "grad_norm": 0.02534683421254158, "learning_rate": 0.00019876111905475815, "loss": 0.818, "step": 84 },
    { "epoch": 0.13107170393215112, "grad_norm": 0.024033140391111374, "learning_rate": 0.00019872218486916498, "loss": 0.775, "step": 85 },
    { "epoch": 0.13261372397841173, "grad_norm": 0.023884933441877365, "learning_rate": 0.00019868265225415265, "loss": 0.7918, "step": 86 },
    { "epoch": 0.1341557440246723, "grad_norm": 0.026067111641168594, "learning_rate": 0.00019864252144935794, "loss": 0.8368, "step": 87 },
    { "epoch": 0.13569776407093292, "grad_norm": 0.02631264552474022, "learning_rate": 0.00019860179269804394, "loss": 0.7138, "step": 88 },
    { "epoch": 0.13723978411719354, "grad_norm": 0.022870918735861778, "learning_rate": 0.00019856046624709822, "loss": 0.7423, "step": 89 },
    { "epoch": 0.13878180416345412, "grad_norm": 0.02922765538096428, "learning_rate": 0.00019851854234703145, "loss": 0.7498, "step": 90 },
    { "epoch": 0.14032382420971473, "grad_norm": 0.02589617855846882, "learning_rate": 0.00019847602125197598, "loss": 0.7438, "step": 91 },
    { "epoch": 0.14186584425597532, "grad_norm": 0.02394738420844078, "learning_rate": 0.00019843290321968412, "loss": 0.7094, "step": 92 },
    { "epoch": 0.14340786430223593, "grad_norm": 0.02237016148865223, "learning_rate": 0.0001983891885115267, "loss": 0.7868, "step": 93 },
    { "epoch": 0.14494988434849654, "grad_norm": 0.028733767569065094, "learning_rate": 0.00019834487739249146, "loss": 0.8178, "step": 94 },
    { "epoch": 0.14649190439475712, "grad_norm": 0.023086342960596085, "learning_rate": 0.0001982999701311814, "loss": 0.8368, "step": 95 },
    { "epoch": 0.14803392444101773, "grad_norm": 0.025624489411711693, "learning_rate": 0.0001982544669998132, "loss": 0.7731, "step": 96 },
    { "epoch": 0.14957594448727835, "grad_norm": 0.028302457183599472, "learning_rate": 0.0001982083682742156, "loss": 0.7942, "step": 97 },
    { "epoch": 0.15111796453353893, "grad_norm": 0.025008324533700943, "learning_rate": 0.00019816167423382765, "loss": 0.7393, "step": 98 },
    { "epoch": 0.15265998457979954, "grad_norm": 0.026291735470294952, "learning_rate": 0.00019811438516169702, "loss": 0.8016, "step": 99 },
    { "epoch": 0.15420200462606015, "grad_norm": 0.031547173857688904, "learning_rate": 0.00019806650134447838, "loss": 0.8597, "step": 100 },
    { "epoch": 0.15574402467232074, "grad_norm": 0.024978285655379295, "learning_rate": 0.00019801802307243153, "loss": 0.8182, "step": 101 },
    { "epoch": 0.15728604471858135, "grad_norm": 0.023977672681212425, "learning_rate": 0.00019796895063941978, "loss": 0.8374, "step": 102 },
    { "epoch": 0.15882806476484193, "grad_norm": 0.025743963196873665, "learning_rate": 0.000197919284342908, "loss": 0.7326, "step": 103 },
    { "epoch": 0.16037008481110254, "grad_norm": 0.02554011158645153, "learning_rate": 0.00019786902448396104, "loss": 0.7703, "step": 104 },
    { "epoch": 0.16191210485736315, "grad_norm": 0.027971483767032623, "learning_rate": 0.00019781817136724165, "loss": 0.6845, "step": 105 },
    { "epoch": 0.16345412490362374, "grad_norm": 0.02789183147251606, "learning_rate": 0.00019776672530100886, "loss": 0.838, "step": 106 },
    { "epoch": 0.16499614494988435, "grad_norm": 0.029534442350268364, "learning_rate": 0.00019771468659711595, "loss": 0.7428, "step": 107 },
    { "epoch": 0.16653816499614496, "grad_norm": 0.02910265140235424, "learning_rate": 0.00019766205557100868, "loss": 0.8891, "step": 108 },
    { "epoch": 0.16808018504240554, "grad_norm": 0.02285209856927395, "learning_rate": 0.00019760883254172327, "loss": 0.7425, "step": 109 },
    { "epoch": 0.16962220508866616, "grad_norm": 0.025452135130763054, "learning_rate": 0.0001975550178318845, "loss": 0.7617, "step": 110 },
    { "epoch": 0.17116422513492677, "grad_norm": 0.02843882516026497, "learning_rate": 0.00019750061176770385, "loss": 0.9045, "step": 111 },
    { "epoch": 0.17270624518118735, "grad_norm": 0.026800749823451042, "learning_rate": 0.00019744561467897735, "loss": 0.72, "step": 112 },
    { "epoch": 0.17424826522744796, "grad_norm": 0.021131988614797592, "learning_rate": 0.00019739002689908377, "loss": 0.6511, "step": 113 },
    { "epoch": 0.17579028527370855, "grad_norm": 0.02588481456041336, "learning_rate": 0.00019733384876498245, "loss": 0.7168, "step": 114 },
    { "epoch": 0.17733230531996916, "grad_norm": 0.031303439289331436, "learning_rate": 0.00019727708061721133, "loss": 0.8685, "step": 115 },
    { "epoch": 0.17887432536622977, "grad_norm": 0.02867058850824833, "learning_rate": 0.00019721972279988477, "loss": 0.655, "step": 116 },
    { "epoch": 0.18041634541249035, "grad_norm": 0.03866586834192276, "learning_rate": 0.00019716177566069174, "loss": 0.7957, "step": 117 },
    { "epoch": 0.18195836545875096, "grad_norm": 0.029205329716205597, "learning_rate": 0.00019710323955089343, "loss": 0.7617, "step": 118 },
    { "epoch": 0.18350038550501158, "grad_norm": 0.024928180500864983, "learning_rate": 0.00019704411482532116, "loss": 0.6982, "step": 119 },
    { "epoch": 0.18504240555127216, "grad_norm": 0.02545573003590107, "learning_rate": 0.0001969844018423744, "loss": 0.6067, "step": 120 },
    { "epoch": 0.18658442559753277, "grad_norm": 0.02810928039252758, "learning_rate": 0.0001969241009640185, "loss": 0.7112, "step": 121 },
    { "epoch": 0.18812644564379338, "grad_norm": 0.022296108305454254, "learning_rate": 0.00019686321255578238, "loss": 0.6598, "step": 122 },
    { "epoch": 0.18966846569005397, "grad_norm": 0.02429027482867241, "learning_rate": 0.00019680173698675648, "loss": 0.6381, "step": 123 },
    { "epoch": 0.19121048573631458, "grad_norm": 0.027076730504631996, "learning_rate": 0.0001967396746295905, "loss": 0.717, "step": 124 },
    { "epoch": 0.19275250578257516, "grad_norm": 0.02401566132903099, "learning_rate": 0.00019667702586049108, "loss": 0.8002, "step": 125 },
    { "epoch": 0.19429452582883577, "grad_norm": 0.024678878486156464, "learning_rate": 0.00019661379105921948, "loss": 0.7834, "step": 126 },
    { "epoch": 0.19583654587509639, "grad_norm": 0.029240388423204422, "learning_rate": 0.00019654997060908946, "loss": 0.8793, "step": 127 },
    { "epoch": 0.19737856592135697, "grad_norm": 0.02550147846341133, "learning_rate": 0.0001964855648969647, "loss": 0.6742, "step": 128 },
    { "epoch": 0.19892058596761758, "grad_norm": 0.02416900172829628, "learning_rate": 0.00019642057431325672, "loss": 0.7728, "step": 129 },
    { "epoch": 0.2004626060138782, "grad_norm": 0.024728331714868546, "learning_rate": 0.0001963549992519223, "loss": 0.7237, "step": 130 },
    { "epoch": 0.20200462606013878, "grad_norm": 0.025203561410307884, "learning_rate": 0.00019628884011046123, "loss": 0.7491, "step": 131 },
    { "epoch": 0.2035466461063994, "grad_norm": 0.02104656957089901, "learning_rate": 0.00019622209728991383, "loss": 0.7324, "step": 132 },
    { "epoch": 0.20508866615265997, "grad_norm": 0.03518475592136383, "learning_rate": 0.00019615477119485855, "loss": 0.8982, "step": 133 },
    { "epoch": 0.20663068619892058, "grad_norm": 0.026010941714048386, "learning_rate": 0.00019608686223340945, "loss": 0.7451, "step": 134 },
    { "epoch": 0.2081727062451812, "grad_norm": 0.025345437228679657, "learning_rate": 0.00019601837081721386, "loss": 0.705, "step": 135 },
    { "epoch": 0.20971472629144178, "grad_norm": 0.02374056540429592, "learning_rate": 0.00019594929736144976, "loss": 0.7307, "step": 136 },
    { "epoch": 0.2112567463377024, "grad_norm": 0.026990080252289772, "learning_rate": 0.00019587964228482332, "loss": 0.8173, "step": 137 },
    { "epoch": 0.212798766383963, "grad_norm": 0.026662928983569145, "learning_rate": 0.00019580940600956638, "loss": 0.818, "step": 138 },
    { "epoch": 0.21434078643022358, "grad_norm": 0.026762284338474274, "learning_rate": 0.00019573858896143376, "loss": 0.6674, "step": 139 },
    { "epoch": 0.2158828064764842, "grad_norm": 0.02683679386973381, "learning_rate": 0.00019566719156970095, "loss": 0.7569, "step": 140 },
    { "epoch": 0.2174248265227448, "grad_norm": 0.022990800440311432, "learning_rate": 0.00019559521426716118, "loss": 0.7051, "step": 141 },
    { "epoch": 0.2189668465690054, "grad_norm": 0.022913858294487, "learning_rate": 0.00019552265749012303, "loss": 0.7483, "step": 142 },
    { "epoch": 0.220508866615266, "grad_norm": 0.025732524693012238, "learning_rate": 0.00019544952167840777, "loss": 0.8484, "step": 143 },
    { "epoch": 0.2220508866615266, "grad_norm": 0.024254556745290756, "learning_rate": 0.00019537580727534644, "loss": 0.747, "step": 144 },
    { "epoch": 0.2235929067077872, "grad_norm": 0.02940620854496956, "learning_rate": 0.0001953015147277776, "loss": 0.8204, "step": 145 },
    { "epoch": 0.2251349267540478, "grad_norm": 0.02590208128094673, "learning_rate": 0.00019522664448604418, "loss": 0.7778, "step": 146 },
    { "epoch": 0.2266769468003084, "grad_norm": 0.02748725563287735, "learning_rate": 0.00019515119700399107, "loss": 0.7782, "step": 147 },
    { "epoch": 0.228218966846569, "grad_norm": 0.024179786443710327, "learning_rate": 0.00019507517273896222, "loss": 0.7651, "step": 148 },
    { "epoch": 0.22976098689282962, "grad_norm": 0.02463974617421627, "learning_rate": 0.00019499857215179786, "loss": 0.8215, "step": 149 },
    { "epoch": 0.2313030069390902, "grad_norm": 0.024559814482927322, "learning_rate": 0.00019492139570683178, "loss": 0.7539, "step": 150 },
    { "epoch": 0.2328450269853508, "grad_norm": 0.023420870304107666, "learning_rate": 0.00019484364387188847, "loss": 0.7035, "step": 151 },
    { "epoch": 0.23438704703161142, "grad_norm": 0.026096729561686516, "learning_rate": 0.00019476531711828027, "loss": 0.8033, "step": 152 },
    { "epoch": 0.235929067077872, "grad_norm": 0.02388446033000946, "learning_rate": 0.0001946864159208045, "loss": 0.6746, "step": 153 },
    { "epoch": 0.23747108712413262, "grad_norm": 0.02306438237428665, "learning_rate": 0.0001946069407577408, "loss": 0.7062, "step": 154 },
    { "epoch": 0.2390131071703932, "grad_norm": 0.024697955697774887, "learning_rate": 0.00019452689211084775, "loss": 0.7691, "step": 155 },
    { "epoch": 0.2405551272166538, "grad_norm": 0.026947690173983574, "learning_rate": 0.00019444627046536056, "loss": 0.7347, "step": 156 },
    { "epoch": 0.24209714726291442, "grad_norm": 0.02345297671854496, "learning_rate": 0.00019436507630998757, "loss": 0.745, "step": 157 },
    { "epoch": 0.243639167309175, "grad_norm": 0.029198188334703445, "learning_rate": 0.00019428331013690765, "loss": 0.7862, "step": 158 },
    { "epoch": 0.24518118735543562, "grad_norm": 0.025465266779065132, "learning_rate": 0.00019420097244176706, "loss": 0.6685, "step": 159 },
    { "epoch": 0.24672320740169623, "grad_norm": 0.02786502055823803, "learning_rate": 0.00019411806372367655, "loss": 0.725, "step": 160 },
    { "epoch": 0.24826522744795682, "grad_norm": 0.02317357063293457, "learning_rate": 0.0001940345844852082, "loss": 0.7075, "step": 161 },
    { "epoch": 0.24980724749421743, "grad_norm": 0.02511444129049778, "learning_rate": 0.00019395053523239245, "loss": 0.7102, "step": 162 },
    { "epoch": 0.25134926754047804, "grad_norm": 0.02567203901708126, "learning_rate": 0.00019386591647471506, "loss": 0.8113, "step": 163 },
    { "epoch": 0.2528912875867386, "grad_norm": 0.02611825056374073, "learning_rate": 0.00019378072872511398, "loss": 0.786, "step": 164 },
    { "epoch": 0.2544333076329992, "grad_norm": 0.02788010984659195, "learning_rate": 0.0001936949724999762, "loss": 0.684, "step": 165 },
    { "epoch": 0.25597532767925985, "grad_norm": 0.026200013235211372, "learning_rate": 0.0001936086483191347, "loss": 0.7563, "step": 166 },
    { "epoch": 0.25751734772552043, "grad_norm": 0.024887658655643463, "learning_rate": 0.00019352175670586533, "loss": 0.6896, "step": 167 },
    { "epoch": 0.259059367771781, "grad_norm": 0.027471961453557014, "learning_rate": 0.00019343429818688347, "loss": 0.8109, "step": 168 },
    { "epoch": 0.26060138781804165, "grad_norm": 0.024350160732865334, "learning_rate": 0.00019334627329234102, "loss": 0.7178, "step": 169 },
    { "epoch": 0.26214340786430224, "grad_norm": 0.02346990443766117, "learning_rate": 0.00019325768255582302, "loss": 0.6508, "step": 170 },
    { "epoch": 0.2636854279105628, "grad_norm": 0.028655072674155235, "learning_rate": 0.00019316852651434462, "loss": 0.7036, "step": 171 },
    { "epoch": 0.26522744795682346, "grad_norm": 0.024629781022667885, "learning_rate": 0.0001930788057083476, "loss": 0.774, "step": 172 },
    { "epoch": 0.26676946800308404, "grad_norm": 0.023107299581170082, "learning_rate": 0.0001929885206816973, "loss": 0.7021, "step": 173 },
    { "epoch": 0.2683114880493446, "grad_norm": 0.022794177755713463, "learning_rate": 0.00019289767198167916, "loss": 0.7469, "step": 174 },
    { "epoch": 0.26985350809560527, "grad_norm": 0.026097161695361137, "learning_rate": 0.00019280626015899546, "loss": 0.7325, "step": 175 },
    { "epoch": 0.27139552814186585, "grad_norm": 0.029879910871386528, "learning_rate": 0.00019271428576776205, "loss": 0.7614, "step": 176 },
    { "epoch": 0.27293754818812643, "grad_norm": 0.026986606419086456, "learning_rate": 0.00019262174936550487, "loss": 0.7718, "step": 177 },
    { "epoch": 0.2744795682343871, "grad_norm": 0.025835467502474785, "learning_rate": 0.00019252865151315665, "loss": 0.7511, "step": 178 },
    { "epoch": 0.27602158828064766, "grad_norm": 0.028101902455091476, "learning_rate": 0.00019243499277505355, "loss": 0.8136, "step": 179 },
    { "epoch": 0.27756360832690824, "grad_norm": 0.028153471648693085, "learning_rate": 0.00019234077371893155, "loss": 0.8798, "step": 180 },
    { "epoch": 0.2791056283731688, "grad_norm": 0.024540267884731293, "learning_rate": 0.0001922459949159233, "loss": 0.7854, "step": 181 },
    { "epoch": 0.28064764841942946, "grad_norm": 0.023485183715820312, "learning_rate": 0.00019215065694055437, "loss": 0.6655, "step": 182 },
    { "epoch": 0.28218966846569005, "grad_norm": 0.023394625633955002, "learning_rate": 0.00019205476037073997, "loss": 0.759, "step": 183 },
    { "epoch": 0.28373168851195063, "grad_norm": 0.025181008502840996, "learning_rate": 0.00019195830578778132, "loss": 0.7649, "step": 184 },
    { "epoch": 0.28527370855821127, "grad_norm": 0.023696815595030785, "learning_rate": 0.0001918612937763622, "loss": 0.7469, "step": 185 },
    { "epoch": 0.28681572860447185, "grad_norm": 0.025794658809900284, "learning_rate": 0.00019176372492454537, "loss": 0.7623, "step": 186 },
    { "epoch": 0.28835774865073244, "grad_norm": 0.02523699589073658, "learning_rate": 0.00019166559982376904, "loss": 0.6621, "step": 187 },
    { "epoch": 0.2898997686969931, "grad_norm": 0.02426300384104252, "learning_rate": 0.00019156691906884325, "loss": 0.748, "step": 188 },
    { "epoch": 0.29144178874325366, "grad_norm": 0.024227775633335114, "learning_rate": 0.0001914676832579463, "loss": 0.724, "step": 189 },
    { "epoch": 0.29298380878951424, "grad_norm": 0.031684551388025284, "learning_rate": 0.00019136789299262108, "loss": 0.8939, "step": 190 },
    { "epoch": 0.2945258288357749, "grad_norm": 0.023766616359353065, "learning_rate": 0.0001912675488777714, "loss": 0.7179, "step": 191 },
    { "epoch": 0.29606784888203547, "grad_norm": 0.02463400922715664, "learning_rate": 0.0001911666515216585, "loss": 0.8202, "step": 192 },
    { "epoch": 0.29760986892829605, "grad_norm": 0.0225905179977417, "learning_rate": 0.00019106520153589708, "loss": 0.6357, "step": 193 },
    { "epoch": 0.2991518889745567, "grad_norm": 0.022882292047142982, "learning_rate": 0.00019096319953545185, "loss": 0.7506, "step": 194 },
    { "epoch": 0.3006939090208173, "grad_norm": 0.027596216648817062, "learning_rate": 0.00019086064613863364, "loss": 0.7669, "step": 195 },
    { "epoch": 0.30223592906707786, "grad_norm": 0.030367175117135048, "learning_rate": 0.00019075754196709572, "loss": 0.7792, "step": 196 },
    { "epoch": 0.3037779491133385, "grad_norm": 0.025017013773322105, "learning_rate": 0.00019065388764583004, "loss": 0.6977, "step": 197 },
    { "epoch": 0.3053199691595991, "grad_norm": 0.02788584679365158, "learning_rate": 0.0001905496838031634, "loss": 0.6871, "step": 198 },
    { "epoch": 0.30686198920585966, "grad_norm": 0.032745130360126495, "learning_rate": 0.00019044493107075368, "loss": 0.8934, "step": 199 },
    { "epoch": 0.3084040092521203, "grad_norm": 0.027039945125579834, "learning_rate": 0.00019033963008358598, "loss": 0.6522, "step": 200 },
    { "epoch": 0.3099460292983809, "grad_norm": 0.03149978816509247, "learning_rate": 0.0001902337814799688, "loss": 0.844, "step": 201 },
    { "epoch": 0.31148804934464147, "grad_norm": 0.024369308724999428, "learning_rate": 0.0001901273859015301, "loss": 0.7202, "step": 202 },
    { "epoch": 0.31303006939090205, "grad_norm": 0.05448361113667488, "learning_rate": 0.00019002044399321356, "loss": 0.8301, "step": 203 },
    { "epoch": 0.3145720894371627, "grad_norm": 0.02388385497033596, "learning_rate": 0.0001899129564032745, "loss": 0.7105, "step": 204 },
    { "epoch": 0.3161141094834233, "grad_norm": 0.02488291636109352, "learning_rate": 0.00018980492378327607, "loss": 0.7393, "step": 205 },
    { "epoch": 0.31765612952968386, "grad_norm": 0.023874662816524506, "learning_rate": 0.00018969634678808522, "loss": 0.6791, "step": 206 },
    { "epoch": 0.3191981495759445, "grad_norm": 0.030418075621128082, "learning_rate": 0.0001895872260758688, "loss": 0.75, "step": 207 },
    { "epoch": 0.3207401696222051, "grad_norm": 0.02990088053047657, "learning_rate": 0.00018947756230808954, "loss": 0.7986, "step": 208 },
    { "epoch": 0.32228218966846567, "grad_norm": 0.027980022132396698, "learning_rate": 0.00018936735614950197, "loss": 0.7054, "step": 209 },
    { "epoch": 0.3238242097147263, "grad_norm": 0.026269223541021347, "learning_rate": 0.00018925660826814856, "loss": 0.8195, "step": 210 },
    { "epoch": 0.3253662297609869, "grad_norm": 0.025045178830623627, "learning_rate": 0.0001891453193353555, "loss": 0.7221, "step": 211 },
    { "epoch": 0.3269082498072475, "grad_norm": 0.021175356581807137, "learning_rate": 0.00018903349002572873, "loss": 0.6513, "step": 212 },
    { "epoch": 0.3284502698535081, "grad_norm": 0.023593388497829437, "learning_rate": 0.0001889211210171498, "loss": 0.7405, "step": 213 },
    { "epoch": 0.3299922898997687, "grad_norm": 0.027270464226603508, "learning_rate": 0.00018880821299077183, "loss": 0.7184, "step": 214 },
    { "epoch": 0.3315343099460293, "grad_norm": 0.025203121826052666, "learning_rate": 0.00018869476663101523, "loss": 0.6659, "step": 215 },
    { "epoch": 0.3330763299922899, "grad_norm": 0.024924185127019882, "learning_rate": 0.0001885807826255638, "loss": 0.7412, "step": 216 },
    { "epoch": 0.3346183500385505, "grad_norm": 0.022862501442432404, "learning_rate": 0.00018846626166536026, "loss": 0.6984, "step": 217 },
    { "epoch": 0.3361603700848111, "grad_norm": 0.022781461477279663, "learning_rate": 0.0001883512044446023, "loss": 0.6374, "step": 218 },
    { "epoch": 0.33770239013107173, "grad_norm": 0.023618346080183983, "learning_rate": 0.0001882356116607383, "loss": 0.6948, "step": 219 },
    { "epoch": 0.3392444101773323, "grad_norm": 0.02586747333407402, "learning_rate": 0.0001881194840144631, "loss": 0.7682, "step": 220 },
    { "epoch": 0.3407864302235929, "grad_norm": 0.026834698393940926, "learning_rate": 0.00018800282220971366, "loss": 0.7546, "step": 221 },
    { "epoch": 0.34232845026985353, "grad_norm": 0.028564658015966415, "learning_rate": 0.00018788562695366495, "loss": 0.8267, "step": 222 },
    { "epoch": 0.3438704703161141, "grad_norm": 0.02490355260670185, "learning_rate": 0.00018776789895672558, "loss": 0.6937, "step": 223 },
    { "epoch": 0.3454124903623747, "grad_norm": 0.03545152395963669, "learning_rate": 0.00018764963893253347, "loss": 0.7001, "step": 224 },
    { "epoch": 0.3469545104086353, "grad_norm": 0.02449451945722103, "learning_rate": 0.00018753084759795158, "loss": 0.8165, "step": 225 },
    { "epoch": 0.3484965304548959, "grad_norm": 0.027851196005940437, "learning_rate": 0.00018741152567306355, "loss": 0.7196, "step": 226 },
    { "epoch": 0.3500385505011565, "grad_norm": 0.02707446552813053, "learning_rate": 0.00018729167388116934, "loss": 0.8375, "step": 227 },
    { "epoch": 0.3515805705474171, "grad_norm": 0.02902469038963318, "learning_rate": 0.00018717129294878074, "loss": 0.6744, "step": 228 },
    { "epoch": 0.35312259059367773, "grad_norm": 0.031537748873233795, "learning_rate": 0.0001870503836056172, "loss": 0.7552, "step": 229 },
    { "epoch": 0.3546646106399383, "grad_norm": 0.026265786960721016, "learning_rate": 0.00018692894658460117, "loss": 0.7551, "step": 230 },
    { "epoch": 0.3562066306861989, "grad_norm": 0.02483406662940979, "learning_rate": 0.0001868069826218538, "loss": 0.7233, "step": 231 },
    { "epoch": 0.35774865073245954, "grad_norm": 0.024800019338726997, "learning_rate": 0.0001866844924566904, "loss": 0.7371, "step": 232 },
    { "epoch": 0.3592906707787201, "grad_norm": 0.02515244670212269, "learning_rate": 0.00018656147683161593, "loss": 0.7621, "step": 233 },
    { "epoch": 0.3608326908249807, "grad_norm": 0.02592633105814457, "learning_rate": 0.00018643793649232072, "loss": 0.7539, "step": 234 },
    { "epoch": 0.36237471087124135, "grad_norm": 0.0275077186524868, "learning_rate": 0.00018631387218767561, "loss": 0.6925, "step": 235 },
    { "epoch": 0.36391673091750193, "grad_norm": 0.027163324877619743, "learning_rate": 0.00018618928466972775, "loss": 0.7867, "step": 236 },
    { "epoch": 0.3654587509637625, "grad_norm": 0.026956308633089066, "learning_rate": 0.0001860641746936957, "loss": 0.7813, "step": 237 },
    { "epoch": 0.36700077101002315, "grad_norm": 0.02884814888238907, "learning_rate": 0.0001859385430179652, "loss": 0.7366, "step": 238 },
    { "epoch": 0.36854279105628374, "grad_norm": 0.025071945041418076, "learning_rate": 0.00018581239040408432, "loss": 0.708, "step": 239 },
    { "epoch": 0.3700848111025443, "grad_norm": 0.032973822206258774, "learning_rate": 0.00018568571761675893, "loss": 0.6544, "step": 240 },
    { "epoch": 0.37162683114880496, "grad_norm": 0.02571587637066841, "learning_rate": 0.0001855585254238481, "loss": 0.7633, "step": 241 },
    { "epoch": 0.37316885119506554, "grad_norm": 0.027229083701968193, "learning_rate": 0.00018543081459635935, "loss": 0.7752, "step": 242 },
    { "epoch": 0.3747108712413261, "grad_norm": 0.022508805617690086, "learning_rate": 0.00018530258590844409, "loss": 0.6437, "step": 243 },
    { "epoch": 0.37625289128758677, "grad_norm": 0.026772433891892433, "learning_rate": 0.00018517384013739285, "loss": 0.805, "step": 244 },
    { "epoch": 0.37779491133384735, "grad_norm": 0.023964572697877884, "learning_rate": 0.00018504457806363056, "loss": 0.7378, "step": 245 },
    { "epoch": 0.37933693138010793, "grad_norm": 0.02789299376308918, "learning_rate": 0.0001849148004707119, "loss": 0.772, "step": 246 },
    { "epoch": 0.3808789514263685, "grad_norm": 0.031168216839432716, "learning_rate": 0.00018478450814531647, "loss": 0.8299, "step": 247 },
    { "epoch": 0.38242097147262916, "grad_norm": 0.03058604896068573, "learning_rate": 0.00018465370187724408, "loss": 0.694, "step": 248 },
    { "epoch": 0.38396299151888974, "grad_norm": 0.028347650542855263, "learning_rate": 0.0001845223824594099, "loss": 0.7373, "step": 249 },
    { "epoch": 0.3855050115651503, "grad_norm": 0.025979626923799515, "learning_rate": 0.00018439055068783966, "loss": 0.8036, "step": 250 },
    { "epoch": 0.38704703161141096, "grad_norm": 0.029867777600884438, "learning_rate": 0.0001842582073616649, "loss": 0.7655, "step": 251 },
    { "epoch": 0.38858905165767155, "grad_norm": 0.025117915123701096, "learning_rate": 0.00018412535328311814, "loss": 0.7532, "step": 252 },
    { "epoch": 0.39013107170393213, "grad_norm": 0.023947982117533684, "learning_rate": 0.00018399198925752778, "loss": 0.6967, "step": 253 },
    { "epoch": 0.39167309175019277, "grad_norm": 0.025846531614661217, "learning_rate": 0.00018385811609331352, "loss": 0.7382, "step": 254 },
    { "epoch": 0.39321511179645335, "grad_norm": 0.025034697726368904, "learning_rate": 0.00018372373460198138, "loss": 0.7282, "step": 255 },
    { "epoch": 0.39475713184271394, "grad_norm": 0.02547437883913517, "learning_rate": 0.00018358884559811856, "loss": 0.7447, "step": 256 },
    { "epoch": 0.3962991518889746, "grad_norm": 0.027032596990466118, "learning_rate": 0.0001834534498993888, "loss": 0.7395, "step": 257 },
    { "epoch": 0.39784117193523516, "grad_norm": 0.027110572904348373, "learning_rate": 0.0001833175483265273, "loss": 0.7963, "step": 258 },
    { "epoch": 0.39938319198149574, "grad_norm": 0.027663685381412506, "learning_rate": 0.00018318114170333568, "loss": 0.7893, "step": 259 },
    { "epoch": 0.4009252120277564, "grad_norm": 0.027797933667898178, "learning_rate": 0.00018304423085667714, "loss": 0.7228, "step": 260 },
    { "epoch": 0.40246723207401697, "grad_norm": 0.026281701400876045, "learning_rate": 0.0001829068166164712, "loss": 0.749, "step": 261 },
    { "epoch": 0.40400925212027755, "grad_norm": 0.024708108976483345, "learning_rate": 0.00018276889981568906, "loss": 0.6307, "step": 262 },
    { "epoch": 0.4055512721665382, "grad_norm": 0.028213316574692726, "learning_rate": 0.0001826304812903481, "loss": 0.8186, "step": 263 },
    { "epoch": 0.4070932922127988, "grad_norm": 0.024718405678868294, "learning_rate": 0.00018249156187950715, "loss": 0.7077, "step": 264 },
    { "epoch": 0.40863531225905936, "grad_norm": 0.02398741990327835, "learning_rate": 0.00018235214242526125, "loss": 0.7041, "step": 265 },
    { "epoch": 0.41017733230531994, "grad_norm": 0.03176787495613098, "learning_rate": 0.00018221222377273657, "loss": 0.7036, "step": 266 },
    { "epoch": 0.4117193523515806, "grad_norm": 0.028862686827778816, "learning_rate": 0.0001820718067700853, "loss": 0.7947, "step": 267 },
    { "epoch": 0.41326137239784116, "grad_norm": 0.026759544387459755, "learning_rate": 0.0001819308922684805, "loss": 0.7737, "step": 268 },
    { "epoch": 0.41480339244410175, "grad_norm": 0.02719755284488201, "learning_rate": 0.00018178948112211103, "loss": 0.7403, "step": 269 },
    { "epoch": 0.4163454124903624, "grad_norm": 0.024756524711847305, "learning_rate": 0.0001816475741881761, "loss": 0.6994, "step": 270 },
    { "epoch": 0.41788743253662297, "grad_norm": 0.03232420235872269, "learning_rate": 0.00018150517232688049, "loss": 0.7866, "step": 271 },
    { "epoch": 0.41942945258288356, "grad_norm": 0.027607185766100883, "learning_rate": 0.00018136227640142894, "loss": 0.7905, "step": 272 },
    { "epoch": 0.4209714726291442, "grad_norm": 0.024344706907868385, "learning_rate": 0.00018121888727802113, "loss": 0.7408, "step": 273 },
    { "epoch": 0.4225134926754048, "grad_norm": 0.025088010355830193, "learning_rate": 0.0001810750058258464, "loss": 0.737, "step": 274 },
    { "epoch": 0.42405551272166536, "grad_norm": 0.023952683433890343, "learning_rate": 0.00018093063291707847, "loss": 0.7764, "step": 275 },
    { "epoch": 0.425597532767926, "grad_norm": 0.0288414116948843, "learning_rate": 0.00018078576942687008, "loss": 0.7035, "step": 276 },
    { "epoch": 0.4271395528141866, "grad_norm": 0.02681080810725689, "learning_rate": 0.0001806404162333479, "loss": 0.739, "step": 277 },
    { "epoch": 0.42868157286044717, "grad_norm": 0.0266602523624897, "learning_rate": 0.0001804945742176069, "loss": 0.7213, "step": 278 },
    { "epoch": 0.4302235929067078, "grad_norm": 0.025282425805926323, "learning_rate": 0.00018034824426370523, "loss": 0.6807, "step": 279 },
    { "epoch": 0.4317656129529684, "grad_norm": 0.025683747604489326, "learning_rate": 0.00018020142725865888, "loss": 0.7283, "step": 280 },
    { "epoch": 0.433307632999229, "grad_norm": 0.024966144934296608, "learning_rate": 0.00018005412409243606, "loss": 0.7096, "step": 281 },
    { "epoch": 0.4348496530454896, "grad_norm": 0.027953188866376877, "learning_rate": 0.00017990633565795208, "loss": 0.8148, "step": 282 },
    { "epoch": 0.4363916730917502, "grad_norm": 0.02772989496588707, "learning_rate": 0.00017975806285106387, "loss": 0.8568, "step": 283 },
    { "epoch": 0.4379336931380108, "grad_norm": 0.028020409867167473, "learning_rate": 0.00017960930657056438, "loss": 0.6732, "step": 284 },
    { "epoch": 0.4394757131842714, "grad_norm": 0.025754399597644806, "learning_rate": 0.00017946006771817733, "loss": 0.7238, "step": 285 },
    { "epoch": 0.441017733230532, "grad_norm": 0.030171813443303108, "learning_rate": 0.00017931034719855166, "loss": 0.7493, "step": 286 },
    { "epoch": 0.4425597532767926, "grad_norm": 0.026995845139026642, "learning_rate": 0.00017916014591925605, "loss": 0.6118, "step": 287 },
    { "epoch": 0.4441017733230532, "grad_norm": 0.03541433438658714, "learning_rate": 0.00017900946479077346, "loss": 0.7243, "step": 288 },
    { "epoch": 0.4456437933693138, "grad_norm": 0.029751230031251907, "learning_rate": 0.00017885830472649553, "loss": 0.7081, "step": 289 },
    { "epoch": 0.4471858134155744, "grad_norm": 0.022569075226783752, "learning_rate": 0.00017870666664271707, "loss": 0.6488, "step": 290 },
    { "epoch": 0.448727833461835, "grad_norm": 0.03262341767549515, "learning_rate": 0.00017855455145863062, "loss": 0.7626, "step": 291 },
    { "epoch": 0.4502698535080956, "grad_norm": 0.02811555750668049, "learning_rate": 0.0001784019600963207, "loss": 0.7485, "step": 292 },
    { "epoch": 0.4518118735543562, "grad_norm": 0.02504836954176426, "learning_rate": 0.00017824889348075837, "loss": 0.7636, "step": 293 },
    { "epoch": 0.4533538936006168, "grad_norm": 0.02362634427845478, "learning_rate": 0.00017809535253979547, "loss": 0.6915, "step": 294 },
    { "epoch": 0.4548959136468774, "grad_norm": 0.029891418293118477, "learning_rate": 0.00017794133820415916, "loss": 0.7686, "step": 295 },
    { "epoch": 0.456437933693138, "grad_norm": 0.02471439354121685, "learning_rate": 0.0001777868514074462, "loss": 0.6693, "step": 296 },
    { "epoch": 0.4579799537393986, "grad_norm": 0.025612330064177513, "learning_rate": 0.00017763189308611722, "loss": 0.7364, "step": 297 },
    { "epoch": 0.45952197378565923, "grad_norm": 0.026865236461162567, "learning_rate": 0.00017747646417949113, "loss": 0.7445, "step": 298 },
    { "epoch": 0.4610639938319198, "grad_norm": 0.024343574419617653, "learning_rate": 0.00017732056562973954, "loss": 0.6271, "step": 299 },
    { "epoch": 0.4626060138781804, "grad_norm": 0.02818606235086918, "learning_rate": 0.00017716419838188077, "loss": 0.7753, "step": 300 },
    { "epoch": 0.46414803392444104, "grad_norm": 0.026821713894605637, "learning_rate": 0.00017700736338377435, "loss": 0.6976, "step": 301 },
    { "epoch": 0.4656900539707016, "grad_norm": 0.025784511119127274, "learning_rate": 0.00017685006158611516, "loss": 0.7062, "step": 302 },
    { "epoch": 0.4672320740169622, "grad_norm": 0.028515879064798355, "learning_rate": 0.00017669229394242766, "loss": 0.7909, "step": 303 },
    { "epoch": 0.46877409406322285, "grad_norm": 0.024095451459288597, "learning_rate": 0.0001765340614090603, "loss": 0.7535, "step": 304 },
    { "epoch": 0.47031611410948343, "grad_norm": 0.025953758507966995, "learning_rate": 0.0001763753649451794, "loss": 0.7623, "step": 305 },
    { "epoch": 0.471858134155744, "grad_norm": 0.03294069692492485, "learning_rate": 0.00017621620551276366, "loss": 0.8946, "step": 306 },
    { "epoch": 0.47340015420200465, "grad_norm": 0.028394997119903564, "learning_rate": 0.00017605658407659808, "loss": 0.7251, "step": 307 },
    { "epoch": 0.47494217424826524, "grad_norm": 0.025346368551254272, "learning_rate": 0.00017589650160426828, "loss": 0.7074, "step": 308 },
    { "epoch": 0.4764841942945258, "grad_norm": 0.025906400755047798, "learning_rate": 0.0001757359590661545, "loss": 0.6472, "step": 309 },
    { "epoch": 0.4780262143407864, "grad_norm": 0.02889554388821125, "learning_rate": 0.00017557495743542585, "loss": 0.715, "step": 310 },
    { "epoch": 0.47956823438704704, "grad_norm": 0.029205597937107086, "learning_rate": 0.00017541349768803428, "loss": 0.698, "step": 311 },
    { "epoch": 0.4811102544333076, "grad_norm": 0.02610400691628456, "learning_rate": 0.0001752515808027088, "loss": 0.7073, "step": 312 },
    { "epoch": 0.4826522744795682, "grad_norm": 0.023945793509483337, "learning_rate": 0.00017508920776094944, "loss": 0.6865, "step": 313 },
    { "epoch": 0.48419429452582885, "grad_norm": 0.028426503762602806, "learning_rate": 0.0001749263795470213, "loss": 0.7264, "step": 314 },
    { "epoch": 0.48573631457208943, "grad_norm": 0.02865850180387497, "learning_rate": 0.0001747630971479487, "loss": 0.7204, "step": 315 },
    { "epoch": 0.48727833461835, "grad_norm": 0.027321334928274155, "learning_rate": 0.00017459936155350908, "loss": 0.7491, "step": 316 },
    { "epoch": 0.48882035466461066, "grad_norm": 0.02754514105618, "learning_rate": 0.00017443517375622704, "loss": 0.7567, "step": 317 },
    { "epoch": 0.49036237471087124, "grad_norm": 0.028822382912039757, "learning_rate": 0.00017427053475136826, "loss": 0.7559, "step": 318 },
    { "epoch": 0.4919043947571318, "grad_norm": 0.03181014209985733, "learning_rate": 0.00017410544553693365, "loss": 0.7704, "step": 319 },
    { "epoch": 0.49344641480339246, "grad_norm": 0.023862695321440697, "learning_rate": 0.00017393990711365312, "loss": 0.6085, "step": 320 },
    { "epoch": 0.49498843484965305, "grad_norm": 0.02703220769762993, "learning_rate": 0.00017377392048497953, "loss": 0.6979, "step": 321 },
    { "epoch": 0.49653045489591363, "grad_norm": 0.025343257933855057, "learning_rate": 0.00017360748665708268, "loss": 0.7287, "step": 322 },
    { "epoch": 0.49807247494217427, "grad_norm": 0.02830134704709053, "learning_rate": 0.00017344060663884324, "loss": 0.8054, "step": 323 },
    { "epoch": 0.49961449498843485, "grad_norm": 0.025809939950704575, "learning_rate": 0.00017327328144184646, "loss": 0.704, "step": 324 },
    { "epoch": 0.5011565150346955, "grad_norm": 0.027546260505914688, "learning_rate": 0.00017310551208037626, "loss": 0.7099, "step": 325 },
    { "epoch": 0.5026985350809561, "grad_norm": 0.027951935306191444, "learning_rate": 0.00017293729957140893, "loss": 0.878, "step": 326 },
    { "epoch": 0.5042405551272167, "grad_norm": 0.026868853718042374, "learning_rate": 0.000172768644934607, "loss": 0.7714, "step": 327 },
    { "epoch": 0.5057825751734772, "grad_norm": 0.026975559070706367, "learning_rate": 0.0001725995491923131, "loss": 0.7526, "step": 328 },
    { "epoch": 0.5073245952197378, "grad_norm": 0.027235837653279305, "learning_rate": 0.0001724300133695437, "loss": 0.6515, "step": 329 },
    { "epoch": 0.5088666152659984, "grad_norm": 0.032752856612205505, "learning_rate": 0.00017226003849398294, "loss": 0.8019, "step": 330 },
    { "epoch": 0.5104086353122591, "grad_norm": 0.029344195500016212, "learning_rate": 0.0001720896255959764, "loss": 0.9296, "step": 331 },
    { "epoch": 0.5119506553585197, "grad_norm": 0.025766605511307716, "learning_rate": 0.00017191877570852483, "loss": 0.7944, "step": 332 },
    { "epoch": 0.5134926754047803, "grad_norm": 0.023067327216267586, "learning_rate": 0.0001717474898672779, "loss": 0.6528, "step": 333 },
    { "epoch": 0.5150346954510409, "grad_norm": 0.026817042380571365, "learning_rate": 0.00017157576911052796, "loss": 0.763, "step": 334 },
    { "epoch": 0.5165767154973014, "grad_norm": 0.02811489813029766, "learning_rate": 0.00017140361447920364, "loss": 0.7936, "step": 335 },
    { "epoch": 0.518118735543562, "grad_norm": 0.02597888559103012, "learning_rate": 0.00017123102701686372, "loss": 0.7217, "step": 336 },
    { "epoch": 0.5196607555898227, "grad_norm": 0.030244017019867897, "learning_rate": 0.00017105800776969055, "loss": 0.756,
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.5212027756360833, | |
| "grad_norm": 0.028289398178458214, | |
| "learning_rate": 0.00017088455778648397, | |
| "loss": 0.7972, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.5227447956823439, | |
| "grad_norm": 0.02397543005645275, | |
| "learning_rate": 0.00017071067811865476, | |
| "loss": 0.6358, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.5242868157286045, | |
| "grad_norm": 0.02565479464828968, | |
| "learning_rate": 0.00017053636982021844, | |
| "loss": 0.6912, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.525828835774865, | |
| "grad_norm": 0.027768775820732117, | |
| "learning_rate": 0.00017036163394778864, | |
| "loss": 0.7997, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.5273708558211256, | |
| "grad_norm": 0.033537182956933975, | |
| "learning_rate": 0.00017018647156057096, | |
| "loss": 0.7363, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.5289128758673862, | |
| "grad_norm": 0.030843475833535194, | |
| "learning_rate": 0.00017001088372035637, | |
| "loss": 0.6977, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.5304548959136469, | |
| "grad_norm": 0.026586662977933884, | |
| "learning_rate": 0.00016983487149151486, | |
| "loss": 0.7874, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.5319969159599075, | |
| "grad_norm": 0.027003532275557518, | |
| "learning_rate": 0.00016965843594098892, | |
| "loss": 0.7895, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.5335389360061681, | |
| "grad_norm": 0.02772395871579647, | |
| "learning_rate": 0.00016948157813828716, | |
| "loss": 0.7799, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.5350809560524287, | |
| "grad_norm": 0.025633979588747025, | |
| "learning_rate": 0.0001693042991554777, | |
| "loss": 0.744, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.5366229760986893, | |
| "grad_norm": 0.033604227006435394, | |
| "learning_rate": 0.00016912660006718186, | |
| "loss": 0.6707, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.5381649961449498, | |
| "grad_norm": 0.03711126372218132, | |
| "learning_rate": 0.00016894848195056746, | |
| "loss": 0.8468, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.5397070161912105, | |
| "grad_norm": 0.02382393553853035, | |
| "learning_rate": 0.00016876994588534234, | |
| "loss": 0.6884, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.5412490362374711, | |
| "grad_norm": 0.02597069926559925, | |
| "learning_rate": 0.0001685909929537479, | |
| "loss": 0.7789, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.5427910562837317, | |
| "grad_norm": 0.027003685012459755, | |
| "learning_rate": 0.0001684116242405525, | |
| "loss": 0.7364, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.5443330763299923, | |
| "grad_norm": 0.028209254145622253, | |
| "learning_rate": 0.00016823184083304482, | |
| "loss": 0.8014, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.5458750963762529, | |
| "grad_norm": 0.025543801486492157, | |
| "learning_rate": 0.0001680516438210273, | |
| "loss": 0.7241, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.5474171164225135, | |
| "grad_norm": 0.024730654433369637, | |
| "learning_rate": 0.00016787103429680955, | |
| "loss": 0.6425, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.5489591364687741, | |
| "grad_norm": 0.02615622617304325, | |
| "learning_rate": 0.0001676900133552018, | |
| "loss": 0.7091, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.5505011565150347, | |
| "grad_norm": 0.025628188624978065, | |
| "learning_rate": 0.00016750858209350808, | |
| "loss": 0.848, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.5520431765612953, | |
| "grad_norm": 0.02642144076526165, | |
| "learning_rate": 0.0001673267416115198, | |
| "loss": 0.7026, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.5535851966075559, | |
| "grad_norm": 0.02542021870613098, | |
| "learning_rate": 0.00016714449301150883, | |
| "loss": 0.6338, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.5551272166538165, | |
| "grad_norm": 0.028573600575327873, | |
| "learning_rate": 0.00016696183739822108, | |
| "loss": 0.7553, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.5566692367000771, | |
| "grad_norm": 0.024142924696207047, | |
| "learning_rate": 0.00016677877587886956, | |
| "loss": 0.7327, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.5582112567463376, | |
| "grad_norm": 0.029369287192821503, | |
| "learning_rate": 0.00016659530956312788, | |
| "loss": 0.7728, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.5597532767925983, | |
| "grad_norm": 0.02711080014705658, | |
| "learning_rate": 0.00016641143956312336, | |
| "loss": 0.6843, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.5612952968388589, | |
| "grad_norm": 0.027241146191954613, | |
| "learning_rate": 0.00016622716699343033, | |
| "loss": 0.8534, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.5628373168851195, | |
| "grad_norm": 0.030265534296631813, | |
| "learning_rate": 0.0001660424929710635, | |
| "loss": 0.8579, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.5643793369313801, | |
| "grad_norm": 0.029054157435894012, | |
| "learning_rate": 0.000165857418615471, | |
| "loss": 0.75, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.5659213569776407, | |
| "grad_norm": 0.029963452368974686, | |
| "learning_rate": 0.0001656719450485278, | |
| "loss": 0.7464, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.5674633770239013, | |
| "grad_norm": 0.025763841345906258, | |
| "learning_rate": 0.00016548607339452853, | |
| "loss": 0.6632, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.569005397070162, | |
| "grad_norm": 0.02374422177672386, | |
| "learning_rate": 0.00016529980478018115, | |
| "loss": 0.6546, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.5705474171164225, | |
| "grad_norm": 0.02538699097931385, | |
| "learning_rate": 0.00016511314033459994, | |
| "loss": 0.6603, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.5720894371626831, | |
| "grad_norm": 0.028310047462582588, | |
| "learning_rate": 0.0001649260811892984, | |
| "loss": 0.6088, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.5736314572089437, | |
| "grad_norm": 0.02981553040444851, | |
| "learning_rate": 0.00016473862847818277, | |
| "loss": 0.8053, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.5751734772552043, | |
| "grad_norm": 0.025665050372481346, | |
| "learning_rate": 0.0001645507833375449, | |
| "loss": 0.6779, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.5767154973014649, | |
| "grad_norm": 0.030752114951610565, | |
| "learning_rate": 0.0001643625469060555, | |
| "loss": 0.6893, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.5782575173477256, | |
| "grad_norm": 0.02725459821522236, | |
| "learning_rate": 0.00016417392032475715, | |
| "loss": 0.7879, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.5797995373939862, | |
| "grad_norm": 0.02713746391236782, | |
| "learning_rate": 0.00016398490473705743, | |
| "loss": 0.7578, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.5813415574402467, | |
| "grad_norm": 0.02641828916966915, | |
| "learning_rate": 0.000163795501288722, | |
| "loss": 0.7335, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.5828835774865073, | |
| "grad_norm": 0.027243638411164284, | |
| "learning_rate": 0.00016360571112786765, | |
| "loss": 0.6873, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.5844255975327679, | |
| "grad_norm": 0.028045805171132088, | |
| "learning_rate": 0.00016341553540495532, | |
| "loss": 0.7141, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.5859676175790285, | |
| "grad_norm": 0.029962563887238503, | |
| "learning_rate": 0.00016322497527278306, | |
| "loss": 0.882, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.5875096376252892, | |
| "grad_norm": 0.027544857934117317, | |
| "learning_rate": 0.00016303403188647913, | |
| "loss": 0.6695, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.5890516576715498, | |
| "grad_norm": 0.02867518924176693, | |
| "learning_rate": 0.00016284270640349514, | |
| "loss": 0.8052, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.5905936777178104, | |
| "grad_norm": 0.02330535091459751, | |
| "learning_rate": 0.00016265099998359866, | |
| "loss": 0.6825, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.5921356977640709, | |
| "grad_norm": 0.030629336833953857, | |
| "learning_rate": 0.00016245891378886655, | |
| "loss": 0.8545, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.5936777178103315, | |
| "grad_norm": 0.030393701046705246, | |
| "learning_rate": 0.00016226644898367768, | |
| "loss": 0.7342, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.5952197378565921, | |
| "grad_norm": 0.02500557340681553, | |
| "learning_rate": 0.000162073606734706, | |
| "loss": 0.6975, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.5967617579028527, | |
| "grad_norm": 0.031064705923199654, | |
| "learning_rate": 0.00016188038821091344, | |
| "loss": 0.7903, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.5983037779491134, | |
| "grad_norm": 0.02647087723016739, | |
| "learning_rate": 0.00016168679458354284, | |
| "loss": 0.745, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.599845797995374, | |
| "grad_norm": 0.02917948178946972, | |
| "learning_rate": 0.00016149282702611077, | |
| "loss": 0.7819, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.6013878180416345, | |
| "grad_norm": 0.02458810992538929, | |
| "learning_rate": 0.00016129848671440046, | |
| "loss": 0.6466, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.6029298380878951, | |
| "grad_norm": 0.023677226155996323, | |
| "learning_rate": 0.00016110377482645477, | |
| "loss": 0.7161, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.6044718581341557, | |
| "grad_norm": 0.02807523123919964, | |
| "learning_rate": 0.00016090869254256892, | |
| "loss": 0.7164, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.6060138781804163, | |
| "grad_norm": 0.031006982550024986, | |
| "learning_rate": 0.0001607132410452833, | |
| "loss": 0.7781, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.607555898226677, | |
| "grad_norm": 0.029372677206993103, | |
| "learning_rate": 0.00016051742151937655, | |
| "loss": 0.7034, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.6090979182729376, | |
| "grad_norm": 0.025212230160832405, | |
| "learning_rate": 0.00016032123515185797, | |
| "loss": 0.6567, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.6106399383191982, | |
| "grad_norm": 0.024775920435786247, | |
| "learning_rate": 0.00016012468313196084, | |
| "loss": 0.6319, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.6121819583654587, | |
| "grad_norm": 0.02687055990099907, | |
| "learning_rate": 0.0001599277666511347, | |
| "loss": 0.7214, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.6137239784117193, | |
| "grad_norm": 0.028431419283151627, | |
| "learning_rate": 0.0001597304869030385, | |
| "loss": 0.7815, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.6152659984579799, | |
| "grad_norm": 0.027922354638576508, | |
| "learning_rate": 0.00015953284508353317, | |
| "loss": 0.7016, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.6168080185042406, | |
| "grad_norm": 0.02737678587436676, | |
| "learning_rate": 0.00015933484239067446, | |
| "loss": 0.7507, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.6183500385505012, | |
| "grad_norm": 0.02791019156575203, | |
| "learning_rate": 0.00015913648002470563, | |
| "loss": 0.705, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.6198920585967618, | |
| "grad_norm": 0.027861539274454117, | |
| "learning_rate": 0.0001589377591880501, | |
| "loss": 0.7208, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.6214340786430224, | |
| "grad_norm": 0.02952715940773487, | |
| "learning_rate": 0.00015873868108530443, | |
| "loss": 0.7114, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.6229760986892829, | |
| "grad_norm": 0.02896735444664955, | |
| "learning_rate": 0.0001585392469232307, | |
| "loss": 0.7797, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.6245181187355435, | |
| "grad_norm": 0.02640017308294773, | |
| "learning_rate": 0.00015833945791074943, | |
| "loss": 0.7186, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.6260601387818041, | |
| "grad_norm": 0.025937926024198532, | |
| "learning_rate": 0.000158139315258932, | |
| "loss": 0.781, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.6276021588280648, | |
| "grad_norm": 0.02851933054625988, | |
| "learning_rate": 0.00015793882018099364, | |
| "loss": 0.7522, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.6291441788743254, | |
| "grad_norm": 0.02368611842393875, | |
| "learning_rate": 0.00015773797389228582, | |
| "loss": 0.719, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.630686198920586, | |
| "grad_norm": 0.026969095692038536, | |
| "learning_rate": 0.00015753677761028896, | |
| "loss": 0.7554, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.6322282189668466, | |
| "grad_norm": 0.02418413758277893, | |
| "learning_rate": 0.00015733523255460506, | |
| "loss": 0.6919, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.6337702390131071, | |
| "grad_norm": 0.026073114946484566, | |
| "learning_rate": 0.0001571333399469503, | |
| "loss": 0.7059, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.6353122590593677, | |
| "grad_norm": 0.027075573801994324, | |
| "learning_rate": 0.0001569311010111476, | |
| "loss": 0.7239, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.6368542791056284, | |
| "grad_norm": 0.02724389173090458, | |
| "learning_rate": 0.00015672851697311934, | |
| "loss": 0.7615, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.638396299151889, | |
| "grad_norm": 0.026826992630958557, | |
| "learning_rate": 0.00015652558906087971, | |
| "loss": 0.8138, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.6399383191981496, | |
| "grad_norm": 0.02641242742538452, | |
| "learning_rate": 0.00015632231850452747, | |
| "loss": 0.6956, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.6414803392444102, | |
| "grad_norm": 0.02495909109711647, | |
| "learning_rate": 0.00015611870653623825, | |
| "loss": 0.7546, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.6430223592906708, | |
| "grad_norm": 0.035753343254327774, | |
| "learning_rate": 0.00015591475439025745, | |
| "loss": 0.7257, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.6445643793369313, | |
| "grad_norm": 0.03195042535662651, | |
| "learning_rate": 0.00015571046330289237, | |
| "loss": 0.693, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.646106399383192, | |
| "grad_norm": 0.027049189433455467, | |
| "learning_rate": 0.00015550583451250503, | |
| "loss": 0.7602, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.6476484194294526, | |
| "grad_norm": 0.027553152292966843, | |
| "learning_rate": 0.00015530086925950434, | |
| "loss": 0.7723, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.6491904394757132, | |
| "grad_norm": 0.025219090282917023, | |
| "learning_rate": 0.00015509556878633894, | |
| "loss": 0.7011, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.6507324595219738, | |
| "grad_norm": 0.029802288860082626, | |
| "learning_rate": 0.00015488993433748944, | |
| "loss": 0.6898, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.6522744795682344, | |
| "grad_norm": 0.02772880345582962, | |
| "learning_rate": 0.00015468396715946083, | |
| "loss": 0.7399, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.653816499614495, | |
| "grad_norm": 0.025485830381512642, | |
| "learning_rate": 0.00015447766850077517, | |
| "loss": 0.6622, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.6553585196607556, | |
| "grad_norm": 0.027489742264151573, | |
| "learning_rate": 0.00015427103961196376, | |
| "loss": 0.8027, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.6569005397070162, | |
| "grad_norm": 0.026778720319271088, | |
| "learning_rate": 0.00015406408174555976, | |
| "loss": 0.7804, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.6584425597532768, | |
| "grad_norm": 0.027850337326526642, | |
| "learning_rate": 0.00015385679615609042, | |
| "loss": 0.7645, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.6599845797995374, | |
| "grad_norm": 0.02659332938492298, | |
| "learning_rate": 0.00015364918410006967, | |
| "loss": 0.7609, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.661526599845798, | |
| "grad_norm": 0.0292272437363863, | |
| "learning_rate": 0.0001534412468359903, | |
| "loss": 0.7788, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.6630686198920586, | |
| "grad_norm": 0.02392621338367462, | |
| "learning_rate": 0.00015323298562431648, | |
| "loss": 0.675, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.6646106399383191, | |
| "grad_norm": 0.030261410400271416, | |
| "learning_rate": 0.00015302440172747605, | |
| "loss": 0.6585, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.6661526599845798, | |
| "grad_norm": 0.031531739979982376, | |
| "learning_rate": 0.00015281549640985294, | |
| "loss": 0.8002, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.6676946800308404, | |
| "grad_norm": 0.025663699954748154, | |
| "learning_rate": 0.00015260627093777936, | |
| "loss": 0.7068, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.669236700077101, | |
| "grad_norm": 0.02657734416425228, | |
| "learning_rate": 0.00015239672657952832, | |
| "loss": 0.7369, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.6707787201233616, | |
| "grad_norm": 0.027592379599809647, | |
| "learning_rate": 0.0001521868646053058, | |
| "loss": 0.7245, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.6723207401696222, | |
| "grad_norm": 0.02645149454474449, | |
| "learning_rate": 0.00015197668628724303, | |
| "loss": 0.718, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.6738627602158828, | |
| "grad_norm": 0.02535802498459816, | |
| "learning_rate": 0.00015176619289938888, | |
| "loss": 0.6926, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.6754047802621435, | |
| "grad_norm": 0.02912677638232708, | |
| "learning_rate": 0.00015155538571770218, | |
| "loss": 0.8564, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.676946800308404, | |
| "grad_norm": 0.023175543174147606, | |
| "learning_rate": 0.00015134426602004375, | |
| "loss": 0.6568, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.6784888203546646, | |
| "grad_norm": 0.02832154743373394, | |
| "learning_rate": 0.00015113283508616895, | |
| "loss": 0.6571, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.6800308404009252, | |
| "grad_norm": 0.029950594529509544, | |
| "learning_rate": 0.0001509210941977196, | |
| "loss": 0.8564, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.6815728604471858, | |
| "grad_norm": 0.02448093518614769, | |
| "learning_rate": 0.00015070904463821658, | |
| "loss": 0.6936, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.6831148804934464, | |
| "grad_norm": 0.027585268020629883, | |
| "learning_rate": 0.00015049668769305172, | |
| "loss": 0.7991, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.6846569005397071, | |
| "grad_norm": 0.025705596432089806, | |
| "learning_rate": 0.00015028402464948022, | |
| "loss": 0.7187, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.6861989205859677, | |
| "grad_norm": 0.02810623310506344, | |
| "learning_rate": 0.00015007105679661276, | |
| "loss": 0.7315, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.6877409406322282, | |
| "grad_norm": 0.02478802390396595, | |
| "learning_rate": 0.0001498577854254076, | |
| "loss": 0.7201, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.6892829606784888, | |
| "grad_norm": 0.03222353756427765, | |
| "learning_rate": 0.0001496442118286631, | |
| "loss": 0.796, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.6908249807247494, | |
| "grad_norm": 0.025557026267051697, | |
| "learning_rate": 0.00014943033730100935, | |
| "loss": 0.6066, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.69236700077101, | |
| "grad_norm": 0.02581370249390602, | |
| "learning_rate": 0.00014921616313890072, | |
| "loss": 0.7186, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.6939090208172706, | |
| "grad_norm": 0.025746649131178856, | |
| "learning_rate": 0.00014900169064060802, | |
| "loss": 0.703, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.6954510408635313, | |
| "grad_norm": 0.03065100871026516, | |
| "learning_rate": 0.00014878692110621028, | |
| "loss": 0.7724, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.6969930609097919, | |
| "grad_norm": 0.03629877045750618, | |
| "learning_rate": 0.00014857185583758723, | |
| "loss": 0.7703, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.6985350809560524, | |
| "grad_norm": 0.025341391563415527, | |
| "learning_rate": 0.0001483564961384112, | |
| "loss": 0.6492, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.700077101002313, | |
| "grad_norm": 0.03427153080701828, | |
| "learning_rate": 0.0001481408433141394, | |
| "loss": 0.7148, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.7016191210485736, | |
| "grad_norm": 0.023825203999876976, | |
| "learning_rate": 0.0001479248986720057, | |
| "loss": 0.6197, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.7031611410948342, | |
| "grad_norm": 0.026944074779748917, | |
| "learning_rate": 0.00014770866352101307, | |
| "loss": 0.7273, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.7047031611410949, | |
| "grad_norm": 0.028203219175338745, | |
| "learning_rate": 0.00014749213917192538, | |
| "loss": 0.7741, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.7062451811873555, | |
| "grad_norm": 0.024622568860650063, | |
| "learning_rate": 0.00014727532693725962, | |
| "loss": 0.6212, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.707787201233616, | |
| "grad_norm": 0.023979736492037773, | |
| "learning_rate": 0.00014705822813127777, | |
| "loss": 0.6863, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.7093292212798766, | |
| "grad_norm": 0.027042685076594353, | |
| "learning_rate": 0.00014684084406997903, | |
| "loss": 0.7746, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.7108712413261372, | |
| "grad_norm": 0.026160864159464836, | |
| "learning_rate": 0.00014662317607109168, | |
| "loss": 0.727, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.7124132613723978, | |
| "grad_norm": 0.035195063799619675, | |
| "learning_rate": 0.0001464052254540652, | |
| "loss": 0.9004, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.7139552814186585, | |
| "grad_norm": 0.02714708261191845, | |
| "learning_rate": 0.00014618699354006223, | |
| "loss": 0.6853, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.7154973014649191, | |
| "grad_norm": 0.027736373245716095, | |
| "learning_rate": 0.0001459684816519505, | |
| "loss": 0.7095, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.7170393215111797, | |
| "grad_norm": 0.032326798886060715, | |
| "learning_rate": 0.000145749691114295, | |
| "loss": 0.7256, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.7185813415574402, | |
| "grad_norm": 0.028024908155202866, | |
| "learning_rate": 0.00014553062325334967, | |
| "loss": 0.7213, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.7201233616037008, | |
| "grad_norm": 0.026767205446958542, | |
| "learning_rate": 0.00014531127939704965, | |
| "loss": 0.6619, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.7216653816499614, | |
| "grad_norm": 0.025469880551099777, | |
| "learning_rate": 0.00014509166087500302, | |
| "loss": 0.6174, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.723207401696222, | |
| "grad_norm": 0.03378410264849663, | |
| "learning_rate": 0.00014487176901848285, | |
| "loss": 0.6447, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.7247494217424827, | |
| "grad_norm": 0.026924695819616318, | |
| "learning_rate": 0.00014465160516041904, | |
| "loss": 0.7116, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.7262914417887433, | |
| "grad_norm": 0.029700160026550293, | |
| "learning_rate": 0.00014443117063539038, | |
| "loss": 0.811, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.7278334618350039, | |
| "grad_norm": 0.030913611873984337, | |
| "learning_rate": 0.00014421046677961626, | |
| "loss": 0.77, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.7293754818812644, | |
| "grad_norm": 0.029507668688893318, | |
| "learning_rate": 0.0001439894949309489, | |
| "loss": 0.7813, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.730917501927525, | |
| "grad_norm": 0.028620922937989235, | |
| "learning_rate": 0.00014376825642886472, | |
| "loss": 0.7401, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.7324595219737856, | |
| "grad_norm": 0.025677144527435303, | |
| "learning_rate": 0.0001435467526144568, | |
| "loss": 0.7408, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.7340015420200463, | |
| "grad_norm": 0.025129586458206177, | |
| "learning_rate": 0.00014332498483042637, | |
| "loss": 0.6585, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.7355435620663069, | |
| "grad_norm": 0.026812126860022545, | |
| "learning_rate": 0.0001431029544210747, | |
| "loss": 0.8063, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.7370855821125675, | |
| "grad_norm": 0.02705306001007557, | |
| "learning_rate": 0.0001428806627322952, | |
| "loss": 0.7655, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.7386276021588281, | |
| "grad_norm": 0.026533039286732674, | |
| "learning_rate": 0.0001426581111115649, | |
| "loss": 0.6685, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.7401696222050886, | |
| "grad_norm": 0.030538393184542656, | |
| "learning_rate": 0.00014243530090793667, | |
| "loss": 0.7389, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.7417116422513492, | |
| "grad_norm": 0.027102958410978317, | |
| "learning_rate": 0.00014221223347203068, | |
| "loss": 0.726, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.7432536622976099, | |
| "grad_norm": 0.024465948343276978, | |
| "learning_rate": 0.00014198891015602646, | |
| "loss": 0.6379, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.7447956823438705, | |
| "grad_norm": 0.027429422363638878, | |
| "learning_rate": 0.00014176533231365464, | |
| "loss": 0.726, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.7463377023901311, | |
| "grad_norm": 0.02289111353456974, | |
| "learning_rate": 0.00014154150130018866, | |
| "loss": 0.6354, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.7478797224363917, | |
| "grad_norm": 0.027374420315027237, | |
| "learning_rate": 0.00014131741847243665, | |
| "loss": 0.8206, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.7494217424826523, | |
| "grad_norm": 0.029361480847001076, | |
| "learning_rate": 0.0001410930851887332, | |
| "loss": 0.6994, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.7509637625289128, | |
| "grad_norm": 0.027077002450823784, | |
| "learning_rate": 0.00014086850280893107, | |
| "loss": 0.7133, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.7525057825751735, | |
| "grad_norm": 0.028823737055063248, | |
| "learning_rate": 0.000140643672694393, | |
| "loss": 0.7975, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.7540478026214341, | |
| "grad_norm": 0.02851509116590023, | |
| "learning_rate": 0.0001404185962079834, | |
| "loss": 0.7497, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.7555898226676947, | |
| "grad_norm": 0.025098523125052452, | |
| "learning_rate": 0.00014019327471406022, | |
| "loss": 0.6978, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.7571318427139553, | |
| "grad_norm": 0.03084149770438671, | |
| "learning_rate": 0.00013996770957846644, | |
| "loss": 0.7607, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.7586738627602159, | |
| "grad_norm": 0.02702442556619644, | |
| "learning_rate": 0.00013974190216852202, | |
| "loss": 0.7639, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.7602158828064765, | |
| "grad_norm": 0.033685747534036636, | |
| "learning_rate": 0.00013951585385301555, | |
| "loss": 0.7793, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.761757902852737, | |
| "grad_norm": 0.028003569692373276, | |
| "learning_rate": 0.00013928956600219592, | |
| "loss": 0.755, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.7632999228989977, | |
| "grad_norm": 0.026269137859344482, | |
| "learning_rate": 0.00013906303998776392, | |
| "loss": 0.7896, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.7648419429452583, | |
| "grad_norm": 0.027992503717541695, | |
| "learning_rate": 0.0001388362771828642, | |
| "loss": 0.6472, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.7663839629915189, | |
| "grad_norm": 0.028174640610814095, | |
| "learning_rate": 0.00013860927896207665, | |
| "loss": 0.765, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.7679259830377795, | |
| "grad_norm": 0.04180079326033592, | |
| "learning_rate": 0.0001383820467014082, | |
| "loss": 0.7479, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.7694680030840401, | |
| "grad_norm": 0.028985602781176567, | |
| "learning_rate": 0.00013815458177828454, | |
| "loss": 0.7121, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.7710100231303006, | |
| "grad_norm": 0.026408828794956207, | |
| "learning_rate": 0.00013792688557154166, | |
| "loss": 0.7411, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.7725520431765613, | |
| "grad_norm": 0.027537405490875244, | |
| "learning_rate": 0.00013769895946141752, | |
| "loss": 0.7628, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.7740940632228219, | |
| "grad_norm": 0.028669610619544983, | |
| "learning_rate": 0.00013747080482954377, | |
| "loss": 0.7698, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.7756360832690825, | |
| "grad_norm": 0.027917267754673958, | |
| "learning_rate": 0.00013724242305893715, | |
| "loss": 0.8452, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.7771781033153431, | |
| "grad_norm": 0.031637243926525116, | |
| "learning_rate": 0.00013701381553399145, | |
| "loss": 0.7457, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.7787201233616037, | |
| "grad_norm": 0.027903905138373375, | |
| "learning_rate": 0.00013678498364046876, | |
| "loss": 0.7475, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.7802621434078643, | |
| "grad_norm": 0.03381239250302315, | |
| "learning_rate": 0.00013655592876549134, | |
| "loss": 0.836, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.781804163454125, | |
| "grad_norm": 0.0790882334113121, | |
| "learning_rate": 0.0001363266522975331, | |
| "loss": 0.5842, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.7833461835003855, | |
| "grad_norm": 0.03034863993525505, | |
| "learning_rate": 0.00013609715562641115, | |
| "loss": 0.8256, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.7848882035466461, | |
| "grad_norm": 0.029499804601073265, | |
| "learning_rate": 0.0001358674401432774, | |
| "loss": 0.7145, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.7864302235929067, | |
| "grad_norm": 0.0285016018897295, | |
| "learning_rate": 0.00013563750724061025, | |
| "loss": 0.6712, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.7879722436391673, | |
| "grad_norm": 0.02717653289437294, | |
| "learning_rate": 0.0001354073583122059, | |
| "loss": 0.6807, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.7895142636854279, | |
| "grad_norm": 0.023634063079953194, | |
| "learning_rate": 0.00013517699475317017, | |
| "loss": 0.6677, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.7910562837316885, | |
| "grad_norm": 0.02920868992805481, | |
| "learning_rate": 0.00013494641795990986, | |
| "loss": 0.7865, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.7925983037779492, | |
| "grad_norm": 0.040133338421583176, | |
| "learning_rate": 0.00013471562933012432, | |
| "loss": 0.7786, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.7941403238242097, | |
| "grad_norm": 0.028838159516453743, | |
| "learning_rate": 0.00013448463026279704, | |
| "loss": 0.7689, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.7956823438704703, | |
| "grad_norm": 0.02688550017774105, | |
| "learning_rate": 0.00013425342215818718, | |
| "loss": 0.7659, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.7972243639167309, | |
| "grad_norm": 0.027155300602316856, | |
| "learning_rate": 0.0001340220064178209, | |
| "loss": 0.7353, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.7987663839629915, | |
| "grad_norm": 0.03566644340753555, | |
| "learning_rate": 0.00013379038444448306, | |
| "loss": 0.827, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.8003084040092521, | |
| "grad_norm": 0.0266974326223135, | |
| "learning_rate": 0.00013355855764220868, | |
| "loss": 0.7693, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.8018504240555128, | |
| "grad_norm": 0.026248447597026825, | |
| "learning_rate": 0.00013332652741627446, | |
| "loss": 0.7659, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.8033924441017734, | |
| "grad_norm": 0.030487187206745148, | |
| "learning_rate": 0.00013309429517318998, | |
| "loss": 0.7244, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.8049344641480339, | |
| "grad_norm": 0.031918346881866455, | |
| "learning_rate": 0.00013286186232068972, | |
| "loss": 0.6111, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.8064764841942945, | |
| "grad_norm": 0.03194589167833328, | |
| "learning_rate": 0.00013262923026772388, | |
| "loss": 0.7521, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.8080185042405551, | |
| "grad_norm": 0.029390091076493263, | |
| "learning_rate": 0.00013239640042445036, | |
| "loss": 0.7689, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.8095605242868157, | |
| "grad_norm": 0.025920424610376358, | |
| "learning_rate": 0.000132163374202226, | |
| "loss": 0.7102, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.8111025443330764, | |
| "grad_norm": 0.02453307807445526, | |
| "learning_rate": 0.000131930153013598, | |
| "loss": 0.712, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.812644564379337, | |
| "grad_norm": 0.035026635974645615, | |
| "learning_rate": 0.0001316967382722954, | |
| "loss": 0.789, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.8141865844255975, | |
| "grad_norm": 0.02895597368478775, | |
| "learning_rate": 0.0001314631313932205, | |
| "loss": 0.8087, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.8157286044718581, | |
| "grad_norm": 0.02956031821668148, | |
| "learning_rate": 0.00013122933379244034, | |
| "loss": 0.7514, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.8172706245181187, | |
| "grad_norm": 0.027792761102318764, | |
| "learning_rate": 0.00013099534688717804, | |
| "loss": 0.7843, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.8188126445643793, | |
| "grad_norm": 0.023245403543114662, | |
| "learning_rate": 0.00013076117209580418, | |
| "loss": 0.6878, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.8203546646106399, | |
| "grad_norm": 0.030602406710386276, | |
| "learning_rate": 0.00013052681083782836, | |
| "loss": 0.6504, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.8218966846569006, | |
| "grad_norm": 0.0250953808426857, | |
| "learning_rate": 0.00013029226453389043, | |
| "loss": 0.6255, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.8234387047031612, | |
| "grad_norm": 0.02791382372379303, | |
| "learning_rate": 0.00013005753460575194, | |
| "loss": 0.7252, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.8249807247494217, | |
| "grad_norm": 0.025774458423256874, | |
| "learning_rate": 0.0001298226224762876, | |
| "loss": 0.7252, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.8265227447956823, | |
| "grad_norm": 0.026806412264704704, | |
| "learning_rate": 0.00012958752956947645, | |
| "loss": 0.7702, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.8280647648419429, | |
| "grad_norm": 0.02904200740158558, | |
| "learning_rate": 0.00012935225731039348, | |
| "loss": 0.7573, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.8296067848882035, | |
| "grad_norm": 0.03018496371805668, | |
| "learning_rate": 0.00012911680712520082, | |
| "loss": 0.6854, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.8311488049344642, | |
| "grad_norm": 0.028921302407979965, | |
| "learning_rate": 0.00012888118044113912, | |
| "loss": 0.7372, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.8326908249807248, | |
| "grad_norm": 0.02912386879324913, | |
| "learning_rate": 0.00012864537868651892, | |
| "loss": 0.864, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.8342328450269854, | |
| "grad_norm": 0.03150784596800804, | |
| "learning_rate": 0.00012840940329071211, | |
| "loss": 0.6846, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.8357748650732459, | |
| "grad_norm": 0.026627201586961746, | |
| "learning_rate": 0.00012817325568414297, | |
| "loss": 0.6596, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.8373168851195065, | |
| "grad_norm": 0.026762191206216812, | |
| "learning_rate": 0.00012793693729827983, | |
| "loss": 0.7859, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.8388589051657671, | |
| "grad_norm": 0.0270906463265419, | |
| "learning_rate": 0.00012770044956562611, | |
| "loss": 0.76, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.8404009252120278, | |
| "grad_norm": 0.029391184449195862, | |
| "learning_rate": 0.0001274637939197119, | |
| "loss": 0.7153, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.8419429452582884, | |
| "grad_norm": 0.028473293408751488, | |
| "learning_rate": 0.00012722697179508508, | |
| "loss": 0.7083, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.843484965304549, | |
| "grad_norm": 0.028644869104027748, | |
| "learning_rate": 0.00012698998462730264, | |
| "loss": 0.7531, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.8450269853508096, | |
| "grad_norm": 0.0290384441614151, | |
| "learning_rate": 0.00012675283385292212, | |
| "loss": 0.8323, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.8465690053970701, | |
| "grad_norm": 0.027428725734353065, | |
| "learning_rate": 0.00012651552090949263, | |
| "loss": 0.783, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.8481110254433307, | |
| "grad_norm": 0.0316435806453228, | |
| "learning_rate": 0.00012627804723554651, | |
| "loss": 0.7303, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.8496530454895914, | |
| "grad_norm": 0.031257808208465576, | |
| "learning_rate": 0.00012604041427059036, | |
| "loss": 0.6641, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.851195065535852, | |
| "grad_norm": 0.031232839450240135, | |
| "learning_rate": 0.00012580262345509622, | |
| "loss": 0.8266, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.8527370855821126, | |
| "grad_norm": 0.028075871989130974, | |
| "learning_rate": 0.00012556467623049312, | |
| "loss": 0.6852, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.8542791056283732, | |
| "grad_norm": 0.0269178319722414, | |
| "learning_rate": 0.0001253265740391582, | |
| "loss": 0.6774, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.8558211256746338, | |
| "grad_norm": 0.033746860921382904, | |
| "learning_rate": 0.00012508831832440794, | |
| "loss": 0.6704, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.8573631457208943, | |
| "grad_norm": 0.02507774904370308, | |
| "learning_rate": 0.0001248499105304894, | |
| "loss": 0.6364, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.8589051657671549, | |
| "grad_norm": 0.027239350602030754, | |
| "learning_rate": 0.00012461135210257155, | |
| "loss": 0.7503, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.8604471858134156, | |
| "grad_norm": 0.031841401010751724, | |
| "learning_rate": 0.00012437264448673647, | |
| "loss": 0.7201, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.8619892058596762, | |
| "grad_norm": 0.025972798466682434, | |
| "learning_rate": 0.00012413378912997058, | |
| "loss": 0.7265, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.8635312259059368, | |
| "grad_norm": 0.02815602719783783, | |
| "learning_rate": 0.00012389478748015583, | |
| "loss": 0.6922, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.8650732459521974, | |
| "grad_norm": 0.03288137540221214, | |
| "learning_rate": 0.00012365564098606102, | |
| "loss": 0.6516, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.866615265998458, | |
| "grad_norm": 0.02655138447880745, | |
| "learning_rate": 0.00012341635109733293, | |
| "loss": 0.6192, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.8681572860447185, | |
| "grad_norm": 0.029187412932515144, | |
| "learning_rate": 0.00012317691926448754, | |
| "loss": 0.8307, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.8696993060909792, | |
| "grad_norm": 0.029966147616505623, | |
| "learning_rate": 0.00012293734693890132, | |
| "loss": 0.7285, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.8712413261372398, | |
| "grad_norm": 0.02818044275045395, | |
| "learning_rate": 0.0001226976355728023, | |
| "loss": 0.7171, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.8727833461835004, | |
| "grad_norm": 0.02721909061074257, | |
| "learning_rate": 0.00012245778661926137, | |
| "loss": 0.6913, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.874325366229761, | |
| "grad_norm": 0.030929675325751305, | |
| "learning_rate": 0.0001222178015321835, | |
| "loss": 0.8006, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.8758673862760216, | |
| "grad_norm": 0.026720581576228142, | |
| "learning_rate": 0.00012197768176629876, | |
| "loss": 0.7348, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.8774094063222821, | |
| "grad_norm": 0.02792746014893055, | |
| "learning_rate": 0.00012173742877715373, | |
| "loss": 0.786, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.8789514263685428, | |
| "grad_norm": 0.025372346863150597, | |
| "learning_rate": 0.00012149704402110243, | |
| "loss": 0.6832, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.8804934464148034, | |
| "grad_norm": 0.025779495015740395, | |
| "learning_rate": 0.00012125652895529766, | |
| "loss": 0.7465, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.882035466461064, | |
| "grad_norm": 0.028759067878127098, | |
| "learning_rate": 0.00012101588503768224, | |
| "loss": 0.7755, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.8835774865073246, | |
| "grad_norm": 0.0319170206785202, | |
| "learning_rate": 0.00012077511372697985, | |
| "loss": 0.7758, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.8851195065535852, | |
| "grad_norm": 0.0287742018699646, | |
| "learning_rate": 0.00012053421648268662, | |
| "loss": 0.7556, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.8866615265998458, | |
| "grad_norm": 0.027272436767816544, | |
| "learning_rate": 0.00012029319476506182, | |
| "loss": 0.6341, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.8882035466461063, | |
| "grad_norm": 0.02786344476044178, | |
| "learning_rate": 0.00012005205003511948, | |
| "loss": 0.7248, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.889745566692367, | |
| "grad_norm": 0.023950345814228058, | |
| "learning_rate": 0.00011981078375461915, | |
| "loss": 0.6907, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.8912875867386276, | |
| "grad_norm": 0.0248698852956295, | |
| "learning_rate": 0.00011956939738605721, | |
| "loss": 0.6612, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.8928296067848882, | |
| "grad_norm": 0.02722037211060524, | |
| "learning_rate": 0.00011932789239265802, | |
| "loss": 0.6179, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.8943716268311488, | |
| "grad_norm": 0.02561650238931179, | |
| "learning_rate": 0.00011908627023836503, | |
| "loss": 0.7478, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.8959136468774094, | |
| "grad_norm": 0.027791699394583702, | |
| "learning_rate": 0.00011884453238783185, | |
| "loss": 0.7803, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.89745566692367, | |
| "grad_norm": 0.033503565937280655, | |
| "learning_rate": 0.00011860268030641338, | |
| "loss": 0.7978, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.8989976869699307, | |
| "grad_norm": 0.0275451447814703, | |
| "learning_rate": 0.00011836071546015703, | |
| "loss": 0.7458, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.9005397070161912, | |
| "grad_norm": 0.02638075314462185, | |
| "learning_rate": 0.00011811863931579377, | |
| "loss": 0.5823, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.9020817270624518, | |
| "grad_norm": 0.029536547139286995, | |
| "learning_rate": 0.00011787645334072913, | |
| "loss": 0.829, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.9036237471087124, | |
| "grad_norm": 0.029731806367635727, | |
| "learning_rate": 0.0001176341590030345, | |
| "loss": 0.7553, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.905165767154973, | |
| "grad_norm": 0.02816937118768692, | |
| "learning_rate": 0.00011739175777143812, | |
| "loss": 0.7094, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.9067077872012336, | |
| "grad_norm": 0.026959970593452454, | |
| "learning_rate": 0.00011714925111531619, | |
| "loss": 0.7368, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.9082498072474943, | |
| "grad_norm": 0.027997490018606186, | |
| "learning_rate": 0.0001169066405046839, | |
| "loss": 0.7864, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.9097918272937549, | |
| "grad_norm": 0.025101030245423317, | |
| "learning_rate": 0.00011666392741018675, | |
| "loss": 0.6445, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.9113338473400154, | |
| "grad_norm": 0.038789402693510056, | |
| "learning_rate": 0.00011642111330309129, | |
| "loss": 0.7104, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.912875867386276, | |
| "grad_norm": 0.0264846533536911, | |
| "learning_rate": 0.0001161781996552765, | |
| "loss": 0.7034, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.9144178874325366, | |
| "grad_norm": 0.03629022464156151, | |
| "learning_rate": 0.00011593518793922468, | |
| "loss": 0.8121, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.9159599074787972, | |
| "grad_norm": 0.025432435795664787, | |
| "learning_rate": 0.00011569207962801263, | |
| "loss": 0.6726, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.9175019275250579, | |
| "grad_norm": 0.02553374320268631, | |
| "learning_rate": 0.00011544887619530275, | |
| "loss": 0.659, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.9190439475713185, | |
| "grad_norm": 0.02498779632151127, | |
| "learning_rate": 0.0001152055791153339, | |
| "loss": 0.5868, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.920585967617579, | |
| "grad_norm": 0.029489582404494286, | |
| "learning_rate": 0.00011496218986291273, | |
| "loss": 0.673, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.9221279876638396, | |
| "grad_norm": 0.029334766790270805, | |
| "learning_rate": 0.00011471870991340459, | |
| "loss": 0.8011, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.9236700077101002, | |
| "grad_norm": 0.0274631530046463, | |
| "learning_rate": 0.00011447514074272451, | |
| "loss": 0.721, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.9252120277563608, | |
| "grad_norm": 0.028073586523532867, | |
| "learning_rate": 0.00011423148382732853, | |
| "loss": 0.839, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.9267540478026214, | |
| "grad_norm": 0.028757184743881226, | |
| "learning_rate": 0.00011398774064420443, | |
| "loss": 0.7409, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.9282960678488821, | |
| "grad_norm": 0.026308685541152954, | |
| "learning_rate": 0.00011374391267086302, | |
| "loss": 0.7517, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.9298380878951427, | |
| "grad_norm": 0.027558207511901855, | |
| "learning_rate": 0.00011350000138532902, | |
| "loss": 0.7041, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.9313801079414032, | |
| "grad_norm": 0.02995571680366993, | |
| "learning_rate": 0.0001132560082661322, | |
| "loss": 0.842, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.9329221279876638, | |
| "grad_norm": 0.028026850894093513, | |
| "learning_rate": 0.00011301193479229841, | |
| "loss": 0.7639, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.9344641480339244, | |
| "grad_norm": 0.026926511898636818, | |
| "learning_rate": 0.00011276778244334055, | |
| "loss": 0.6315, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.936006168080185, | |
| "grad_norm": 0.031490955501794815, | |
| "learning_rate": 0.00011252355269924963, | |
| "loss": 0.5844, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.9375481881264457, | |
| "grad_norm": 0.028631744906306267, | |
| "learning_rate": 0.00011227924704048585, | |
| "loss": 0.6431, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.9390902081727063, | |
| "grad_norm": 0.029599271714687347, | |
| "learning_rate": 0.00011203486694796957, | |
| "loss": 0.7085, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.9406322282189669, | |
| "grad_norm": 0.027870824560523033, | |
| "learning_rate": 0.00011179041390307235, | |
| "loss": 0.7381, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.9421742482652274, | |
| "grad_norm": 0.030139662325382233, | |
| "learning_rate": 0.00011154588938760794, | |
| "loss": 0.7299, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.943716268311488, | |
| "grad_norm": 0.031028704717755318, | |
| "learning_rate": 0.00011130129488382341, | |
| "loss": 0.747, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.9452582883577486, | |
| "grad_norm": 0.02865663915872574, | |
| "learning_rate": 0.00011105663187438997, | |
| "loss": 0.7025, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.9468003084040093, | |
| "grad_norm": 0.029388774186372757, | |
| "learning_rate": 0.00011081190184239419, | |
| "loss": 0.6645, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.9483423284502699, | |
| "grad_norm": 0.023502621799707413, | |
| "learning_rate": 0.00011056710627132884, | |
| "loss": 0.6455, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.9498843484965305, | |
| "grad_norm": 0.02663263864815235, | |
| "learning_rate": 0.00011032224664508405, | |
| "loss": 0.7677, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.9514263685427911, | |
| "grad_norm": 0.02990710362792015, | |
| "learning_rate": 0.00011007732444793814, | |
| "loss": 0.7017, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.9529683885890516, | |
| "grad_norm": 0.029992980882525444, | |
| "learning_rate": 0.00010983234116454886, | |
| "loss": 0.7969, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.9545104086353122, | |
| "grad_norm": 0.03714431822299957, | |
| "learning_rate": 0.00010958729827994404, | |
| "loss": 0.7646, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.9560524286815728, | |
| "grad_norm": 0.030639756470918655, | |
| "learning_rate": 0.00010934219727951301, | |
| "loss": 0.8926, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.9575944487278335, | |
| "grad_norm": 0.030243555083870888, | |
| "learning_rate": 0.00010909703964899729, | |
| "loss": 0.6676, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.9591364687740941, | |
| "grad_norm": 0.029893988743424416, | |
| "learning_rate": 0.00010885182687448161, | |
| "loss": 0.7238, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.9606784888203547, | |
| "grad_norm": 0.02677508443593979, | |
| "learning_rate": 0.00010860656044238511, | |
| "loss": 0.7378, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.9622205088666153, | |
| "grad_norm": 0.02502857707440853, | |
| "learning_rate": 0.00010836124183945208, | |
| "loss": 0.7129, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.9637625289128758, | |
| "grad_norm": 0.02673465758562088, | |
| "learning_rate": 0.00010811587255274313, | |
| "loss": 0.7046, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.9653045489591364, | |
| "grad_norm": 0.030728284269571304, | |
| "learning_rate": 0.00010787045406962607, | |
| "loss": 0.752, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.9668465690053971, | |
| "grad_norm": 0.02698132023215294, | |
| "learning_rate": 0.00010762498787776687, | |
| "loss": 0.6365, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.9683885890516577, | |
| "grad_norm": 0.03070535510778427, | |
| "learning_rate": 0.0001073794754651208, | |
| "loss": 0.7465, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.9699306090979183, | |
| "grad_norm": 0.026031676679849625, | |
| "learning_rate": 0.00010713391831992323, | |
| "loss": 0.6991, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.9714726291441789, | |
| "grad_norm": 0.02689627930521965, | |
| "learning_rate": 0.00010688831793068078, | |
| "loss": 0.6468, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.9730146491904395, | |
| "grad_norm": 0.028856465592980385, | |
| "learning_rate": 0.00010664267578616207, | |
| "loss": 0.7051, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.9745566692367, | |
| "grad_norm": 0.027791066095232964, | |
| "learning_rate": 0.00010639699337538898, | |
| "loss": 0.7247, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.9760986892829607, | |
| "grad_norm": 0.02929227240383625, | |
| "learning_rate": 0.00010615127218762733, | |
| "loss": 0.6994, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.9776407093292213, | |
| "grad_norm": 0.0322168804705143, | |
| "learning_rate": 0.0001059055137123781, | |
| "loss": 0.753, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.9791827293754819, | |
| "grad_norm": 0.02872268296778202, | |
| "learning_rate": 0.00010565971943936825, | |
| "loss": 0.7451, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.9807247494217425, | |
| "grad_norm": 0.03104063868522644, | |
| "learning_rate": 0.00010541389085854176, | |
| "loss": 0.7482, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.9822667694680031, | |
| "grad_norm": 0.026641814038157463, | |
| "learning_rate": 0.00010516802946005058, | |
| "loss": 0.7034, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.9838087895142636, | |
| "grad_norm": 0.027356311678886414, | |
| "learning_rate": 0.00010492213673424553, | |
| "loss": 0.6459, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.9853508095605242, | |
| "grad_norm": 0.031015669927001, | |
| "learning_rate": 0.00010467621417166745, | |
| "loss": 0.8185, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.9868928296067849, | |
| "grad_norm": 0.024331575259566307, | |
| "learning_rate": 0.00010443026326303789, | |
| "loss": 0.64, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.9884348496530455, | |
| "grad_norm": 0.026257265359163284, | |
| "learning_rate": 0.00010418428549925032, | |
| "loss": 0.6634, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.9899768696993061, | |
| "grad_norm": 0.026038171723484993, | |
| "learning_rate": 0.00010393828237136107, | |
| "loss": 0.728, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.9915188897455667, | |
| "grad_norm": 0.028959324583411217, | |
| "learning_rate": 0.00010369225537058002, | |
| "loss": 0.7315, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.9930609097918273, | |
| "grad_norm": 0.026242226362228394, | |
| "learning_rate": 0.00010344620598826198, | |
| "loss": 0.7096, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.9946029298380878, | |
| "grad_norm": 0.029856307432055473, | |
| "learning_rate": 0.00010320013571589726, | |
| "loss": 0.7052, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.9961449498843485, | |
| "grad_norm": 0.03145446628332138, | |
| "learning_rate": 0.00010295404604510286, | |
| "loss": 0.8392, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.9976869699306091, | |
| "grad_norm": 0.032184895128011703, | |
| "learning_rate": 0.00010270793846761347, | |
| "loss": 0.7936, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.9992289899768697, | |
| "grad_norm": 0.027684815227985382, | |
| "learning_rate": 0.00010246181447527212, | |
| "loss": 0.6861, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.038702599704265594, | |
| "learning_rate": 0.00010221567556002153, | |
| "loss": 0.7216, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_loss": 1.0138009786605835, | |
| "eval_runtime": 121.2186, | |
| "eval_samples_per_second": 13.975, | |
| "eval_steps_per_second": 1.749, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 1.0015420200462606, | |
| "grad_norm": 0.04231274873018265, | |
| "learning_rate": 0.00010196952321389482, | |
| "loss": 0.5952, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 1.0030840400925212, | |
| "grad_norm": 0.031240805983543396, | |
| "learning_rate": 0.00010172335892900645, | |
| "loss": 0.7104, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 1.0046260601387818, | |
| "grad_norm": 0.030615027993917465, | |
| "learning_rate": 0.00010147718419754336, | |
| "loss": 0.6679, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 1.0061680801850423, | |
| "grad_norm": 0.029420683160424232, | |
| "learning_rate": 0.00010123100051175567, | |
| "loss": 0.6429, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 1.007710100231303, | |
| "grad_norm": 0.030873583629727364, | |
| "learning_rate": 0.00010098480936394801, | |
| "loss": 0.658, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 1.0092521202775635, | |
| "grad_norm": 0.031131163239479065, | |
| "learning_rate": 0.00010073861224647, | |
| "loss": 0.6273, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 1.0107941403238243, | |
| "grad_norm": 0.03470936417579651, | |
| "learning_rate": 0.00010049241065170765, | |
| "loss": 0.5581, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 1.012336160370085, | |
| "grad_norm": 0.03058730624616146, | |
| "learning_rate": 0.00010024620607207393, | |
| "loss": 0.6956, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 1.0138781804163455, | |
| "grad_norm": 0.02509419433772564, | |
| "learning_rate": 0.0001, | |
| "loss": 0.5643, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 1.015420200462606, | |
| "grad_norm": 0.03301594778895378, | |
| "learning_rate": 9.975379392792609e-05, | |
| "loss": 0.6595, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 1.0169622205088666, | |
| "grad_norm": 0.03824777528643608, | |
| "learning_rate": 9.950758934829241e-05, | |
| "loss": 0.6632, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 1.0185042405551272, | |
| "grad_norm": 0.04577745869755745, | |
| "learning_rate": 9.926138775352998e-05, | |
| "loss": 0.6584, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 1.0200462606013878, | |
| "grad_norm": 0.03415631502866745, | |
| "learning_rate": 9.901519063605203e-05, | |
| "loss": 0.6815, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 1.0215882806476484, | |
| "grad_norm": 0.03187074884772301, | |
| "learning_rate": 9.876899948824434e-05, | |
| "loss": 0.6878, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 1.023130300693909, | |
| "grad_norm": 0.03230864182114601, | |
| "learning_rate": 9.85228158024567e-05, | |
| "loss": 0.7208, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 1.0246723207401696, | |
| "grad_norm": 0.03281601890921593, | |
| "learning_rate": 9.827664107099359e-05, | |
| "loss": 0.6209, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 1.0262143407864301, | |
| "grad_norm": 0.030430031940340996, | |
| "learning_rate": 9.80304767861052e-05, | |
| "loss": 0.66, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 1.0277563608326907, | |
| "grad_norm": 0.027022497728466988, | |
| "learning_rate": 9.778432443997847e-05, | |
| "loss": 0.601, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 1.0292983808789515, | |
| "grad_norm": 0.03322981297969818, | |
| "learning_rate": 9.75381855247279e-05, | |
| "loss": 0.6658, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 1.0308404009252121, | |
| "grad_norm": 0.02994593232870102, | |
| "learning_rate": 9.729206153238657e-05, | |
| "loss": 0.6471, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 1.0323824209714727, | |
| "grad_norm": 0.027031417936086655, | |
| "learning_rate": 9.704595395489714e-05, | |
| "loss": 0.6096, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 1.0339244410177333, | |
| "grad_norm": 0.029254987835884094, | |
| "learning_rate": 9.679986428410276e-05, | |
| "loss": 0.6592, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 1.0354664610639939, | |
| "grad_norm": 0.030262276530265808, | |
| "learning_rate": 9.655379401173804e-05, | |
| "loss": 0.6317, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 1.0370084811102545, | |
| "grad_norm": 0.03326797112822533, | |
| "learning_rate": 9.630774462942e-05, | |
| "loss": 0.6071, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 1.038550501156515, | |
| "grad_norm": 0.029893390834331512, | |
| "learning_rate": 9.606171762863898e-05, | |
| "loss": 0.6497, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 1.0400925212027756, | |
| "grad_norm": 0.03093409165740013, | |
| "learning_rate": 9.581571450074968e-05, | |
| "loss": 0.6562, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 1.0416345412490362, | |
| "grad_norm": 0.030291495844721794, | |
| "learning_rate": 9.556973673696213e-05, | |
| "loss": 0.7115, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 1.0431765612952968, | |
| "grad_norm": 0.03254539519548416, | |
| "learning_rate": 9.532378582833259e-05, | |
| "loss": 0.6922, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 1.0447185813415574, | |
| "grad_norm": 0.028841672465205193, | |
| "learning_rate": 9.50778632657545e-05, | |
| "loss": 0.5989, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 1.046260601387818, | |
| "grad_norm": 0.03121812455356121, | |
| "learning_rate": 9.483197053994947e-05, | |
| "loss": 0.6697, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 1.0478026214340785, | |
| "grad_norm": 0.03435603901743889, | |
| "learning_rate": 9.458610914145826e-05, | |
| "loss": 0.7151, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 1.0493446414803393, | |
| "grad_norm": 0.03248435631394386, | |
| "learning_rate": 9.434028056063177e-05, | |
| "loss": 0.6599, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 1.0508866615266, | |
| "grad_norm": 0.0340297594666481, | |
| "learning_rate": 9.409448628762193e-05, | |
| "loss": 0.6588, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 1.0524286815728605, | |
| "grad_norm": 0.031176073476672173, | |
| "learning_rate": 9.384872781237269e-05, | |
| "loss": 0.7002, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 1.053970701619121, | |
| "grad_norm": 0.031643763184547424, | |
| "learning_rate": 9.360300662461103e-05, | |
| "loss": 0.6481, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 1.0555127216653817, | |
| "grad_norm": 0.03346734866499901, | |
| "learning_rate": 9.335732421383794e-05, | |
| "loss": 0.605, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 1.0570547417116423, | |
| "grad_norm": 0.028062671422958374, | |
| "learning_rate": 9.311168206931925e-05, | |
| "loss": 0.5722, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 1.0585967617579028, | |
| "grad_norm": 0.03068237006664276, | |
| "learning_rate": 9.286608168007678e-05, | |
| "loss": 0.6179, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 1.0601387818041634, | |
| "grad_norm": 0.03420115262269974, | |
| "learning_rate": 9.262052453487924e-05, | |
| "loss": 0.7138, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 1.061680801850424, | |
| "grad_norm": 0.03138900548219681, | |
| "learning_rate": 9.237501212223314e-05, | |
| "loss": 0.6974, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 1.0632228218966846, | |
| "grad_norm": 0.030687514692544937, | |
| "learning_rate": 9.212954593037394e-05, | |
| "loss": 0.6116, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 1.0647648419429452, | |
| "grad_norm": 0.03098466247320175, | |
| "learning_rate": 9.18841274472569e-05, | |
| "loss": 0.6968, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 1.0663068619892058, | |
| "grad_norm": 0.030333001166582108, | |
| "learning_rate": 9.163875816054794e-05, | |
| "loss": 0.6014, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 1.0678488820354666, | |
| "grad_norm": 0.031592730432748795, | |
| "learning_rate": 9.139343955761493e-05, | |
| "loss": 0.6784, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 1.0693909020817272, | |
| "grad_norm": 0.03480207920074463, | |
| "learning_rate": 9.11481731255184e-05, | |
| "loss": 0.7779, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 1.0709329221279877, | |
| "grad_norm": 0.03373701125383377, | |
| "learning_rate": 9.090296035100275e-05, | |
| "loss": 0.7062, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 1.0724749421742483, | |
| "grad_norm": 0.03207295015454292, | |
| "learning_rate": 9.065780272048701e-05, | |
| "loss": 0.7564, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 1.074016962220509, | |
| "grad_norm": 0.03260812535881996, | |
| "learning_rate": 9.041270172005598e-05, | |
| "loss": 0.663, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 1.0755589822667695, | |
| "grad_norm": 0.03293580561876297, | |
| "learning_rate": 9.016765883545117e-05, | |
| "loss": 0.6174, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 1.07710100231303, | |
| "grad_norm": 0.03376418352127075, | |
| "learning_rate": 8.992267555206185e-05, | |
| "loss": 0.6528, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 1.0786430223592907, | |
| "grad_norm": 0.040761563926935196, | |
| "learning_rate": 8.967775335491595e-05, | |
| "loss": 0.6229, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 1.0801850424055512, | |
| "grad_norm": 0.03353280574083328, | |
| "learning_rate": 8.943289372867118e-05, | |
| "loss": 0.6533, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 1.0817270624518118, | |
| "grad_norm": 0.031145334243774414, | |
| "learning_rate": 8.918809815760585e-05, | |
| "loss": 0.6625, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 1.0832690824980724, | |
| "grad_norm": 0.037107136100530624, | |
| "learning_rate": 8.894336812561005e-05, | |
| "loss": 0.6317, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 1.084811102544333, | |
| "grad_norm": 0.031154975295066833, | |
| "learning_rate": 8.869870511617661e-05, | |
| "loss": 0.6316, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 1.0863531225905936, | |
| "grad_norm": 0.029372645542025566, | |
| "learning_rate": 8.845411061239207e-05, | |
| "loss": 0.5326, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 1.0878951426368544, | |
| "grad_norm": 0.03378273546695709, | |
| "learning_rate": 8.820958609692769e-05, | |
| "loss": 0.6289, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 1.089437162683115, | |
| "grad_norm": 0.027697058394551277, | |
| "learning_rate": 8.796513305203048e-05, | |
| "loss": 0.6002, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 1.0909791827293756, | |
| "grad_norm": 0.03780931979417801, | |
| "learning_rate": 8.772075295951415e-05, | |
| "loss": 0.5932, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 1.0925212027756361, | |
| "grad_norm": 0.03167394921183586, | |
| "learning_rate": 8.74764473007504e-05, | |
| "loss": 0.6117, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 1.0940632228218967, | |
| "grad_norm": 0.03654084354639053, | |
| "learning_rate": 8.723221755665948e-05, | |
| "loss": 0.7338, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 1.0956052428681573, | |
| "grad_norm": 0.035005394369363785, | |
| "learning_rate": 8.698806520770161e-05, | |
| "loss": 0.738, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 1.0971472629144179, | |
| "grad_norm": 0.03315757215023041, | |
| "learning_rate": 8.674399173386779e-05, | |
| "loss": 0.5979, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 1.0986892829606785, | |
| "grad_norm": 0.042626503854990005, | |
| "learning_rate": 8.649999861467099e-05, | |
| "loss": 0.6772, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 1.100231303006939, | |
| "grad_norm": 0.035321805626153946, | |
| "learning_rate": 8.625608732913701e-05, | |
| "loss": 0.627, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 1.1017733230531996, | |
| "grad_norm": 0.0311568696051836, | |
| "learning_rate": 8.60122593557956e-05, | |
| "loss": 0.6204, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 1.1033153430994602, | |
| "grad_norm": 0.03266143798828125, | |
| "learning_rate": 8.57685161726715e-05, | |
| "loss": 0.7405, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 1.1048573631457208, | |
| "grad_norm": 0.03293644264340401, | |
| "learning_rate": 8.55248592572755e-05, | |
| "loss": 0.7476, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 1.1063993831919814, | |
| "grad_norm": 0.03008246421813965, | |
| "learning_rate": 8.528129008659544e-05, | |
| "loss": 0.6292, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 1.1079414032382422, | |
| "grad_norm": 0.029346976429224014, | |
| "learning_rate": 8.503781013708729e-05, | |
| "loss": 0.6413, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 1.1094834232845028, | |
| "grad_norm": 0.0354442335665226, | |
| "learning_rate": 8.479442088466611e-05, | |
| "loss": 0.6475, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 1.1110254433307634, | |
| "grad_norm": 0.030347274616360664, | |
| "learning_rate": 8.455112380469729e-05, | |
| "loss": 0.6532, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 1.112567463377024, | |
| "grad_norm": 0.03859079256653786, | |
| "learning_rate": 8.430792037198737e-05, | |
| "loss": 0.7131, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 1.1141094834232845, | |
| "grad_norm": 0.0332760214805603, | |
| "learning_rate": 8.406481206077534e-05, | |
| "loss": 0.5202, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 1.1156515034695451, | |
| "grad_norm": 0.03403365612030029, | |
| "learning_rate": 8.382180034472353e-05, | |
| "loss": 0.7009, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 1.1171935235158057, | |
| "grad_norm": 0.03711424767971039, | |
| "learning_rate": 8.357888669690876e-05, | |
| "loss": 0.7204, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 1.1187355435620663, | |
| "grad_norm": 0.030927130952477455, | |
| "learning_rate": 8.333607258981329e-05, | |
| "loss": 0.636, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 1.1202775636083269, | |
| "grad_norm": 0.03724541887640953, | |
| "learning_rate": 8.30933594953161e-05, | |
| "loss": 0.6751, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 1.1218195836545874, | |
| "grad_norm": 0.036198195070028305, | |
| "learning_rate": 8.285074888468383e-05, | |
| "loss": 0.7036, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 1.123361603700848, | |
| "grad_norm": 0.0362878255546093, | |
| "learning_rate": 8.26082422285619e-05, | |
| "loss": 0.6865, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 1.1249036237471086, | |
| "grad_norm": 0.03348914906382561, | |
| "learning_rate": 8.236584099696554e-05, | |
| "loss": 0.7005, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 1.1264456437933692, | |
| "grad_norm": 0.036960627883672714, | |
| "learning_rate": 8.212354665927088e-05, | |
| "loss": 0.719, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 1.12798766383963, | |
| "grad_norm": 0.03627372533082962, | |
| "learning_rate": 8.188136068420627e-05, | |
| "loss": 0.6358, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 1.1295296838858906, | |
| "grad_norm": 0.03624941408634186, | |
| "learning_rate": 8.163928453984298e-05, | |
| "loss": 0.6836, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 1.1310717039321512, | |
| "grad_norm": 0.03319893777370453, | |
| "learning_rate": 8.139731969358663e-05, | |
| "loss": 0.6449, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 1.1326137239784118, | |
| "grad_norm": 0.03332339972257614, | |
| "learning_rate": 8.115546761216822e-05, | |
| "loss": 0.7252, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 1.1341557440246723, | |
| "grad_norm": 0.030635269358754158, | |
| "learning_rate": 8.091372976163495e-05, | |
| "loss": 0.6092, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 1.135697764070933, | |
| "grad_norm": 0.03975079208612442, | |
| "learning_rate": 8.067210760734199e-05, | |
| "loss": 0.7069, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 1.1372397841171935, | |
| "grad_norm": 0.036304909735918045, | |
| "learning_rate": 8.043060261394282e-05, | |
| "loss": 0.6257, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 1.138781804163454, | |
| "grad_norm": 0.0363340899348259, | |
| "learning_rate": 8.01892162453809e-05, | |
| "loss": 0.6912, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 1.1403238242097147, | |
| "grad_norm": 0.030387498438358307, | |
| "learning_rate": 7.994794996488055e-05, | |
| "loss": 0.6096, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 1.1418658442559753, | |
| "grad_norm": 0.03291669860482216, | |
| "learning_rate": 7.970680523493818e-05, | |
| "loss": 0.6629, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 1.1434078643022358, | |
| "grad_norm": 0.03067462518811226, | |
| "learning_rate": 7.94657835173134e-05, | |
| "loss": 0.6726, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 1.1449498843484966, | |
| "grad_norm": 0.03496173024177551, | |
| "learning_rate": 7.922488627302015e-05, | |
| "loss": 0.7331, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 1.146491904394757, | |
| "grad_norm": 0.03458810970187187, | |
| "learning_rate": 7.89841149623178e-05, | |
| "loss": 0.6987, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 1.1480339244410178, | |
| "grad_norm": 0.03519074246287346, | |
| "learning_rate": 7.874347104470234e-05, | |
| "loss": 0.5909, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 1.1495759444872784, | |
| "grad_norm": 0.03368091583251953, | |
| "learning_rate": 7.85029559788976e-05, | |
| "loss": 0.6144, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 1.151117964533539, | |
| "grad_norm": 0.03208000957965851, | |
| "learning_rate": 7.826257122284629e-05, | |
| "loss": 0.6479, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 1.1526599845797996, | |
| "grad_norm": 0.03380461782217026, | |
| "learning_rate": 7.802231823370125e-05, | |
| "loss": 0.7049, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 1.1542020046260602, | |
| "grad_norm": 0.03315620869398117, | |
| "learning_rate": 7.778219846781653e-05, | |
| "loss": 0.6683, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 1.1557440246723207, | |
| "grad_norm": 0.03291339799761772, | |
| "learning_rate": 7.754221338073864e-05, | |
| "loss": 0.6011, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 1.1572860447185813, | |
| "grad_norm": 0.031395379453897476, | |
| "learning_rate": 7.730236442719774e-05, | |
| "loss": 0.5657, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 1.158828064764842, | |
| "grad_norm": 0.03961668163537979, | |
| "learning_rate": 7.706265306109872e-05, | |
| "loss": 0.6831, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 1.1603700848111025, | |
| "grad_norm": 0.031115271151065826, | |
| "learning_rate": 7.68230807355125e-05, | |
| "loss": 0.5597, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 1.161912104857363, | |
| "grad_norm": 0.030850525945425034, | |
| "learning_rate": 7.658364890266711e-05, | |
| "loss": 0.6421, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 1.1634541249036237, | |
| "grad_norm": 0.03267287090420723, | |
| "learning_rate": 7.634435901393899e-05, | |
| "loss": 0.6157, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 1.1649961449498845, | |
| "grad_norm": 0.03116137906908989, | |
| "learning_rate": 7.610521251984419e-05, | |
| "loss": 0.6098, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 1.166538164996145, | |
| "grad_norm": 0.03797944262623787, | |
| "learning_rate": 7.586621087002945e-05, | |
| "loss": 0.6847, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 1.1680801850424056, | |
| "grad_norm": 0.03272239863872528, | |
| "learning_rate": 7.562735551326355e-05, | |
| "loss": 0.637, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 1.1696222050886662, | |
| "grad_norm": 0.03280835226178169, | |
| "learning_rate": 7.538864789742845e-05, | |
| "loss": 0.629, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 1.1711642251349268, | |
| "grad_norm": 0.03413494676351547, | |
| "learning_rate": 7.51500894695106e-05, | |
| "loss": 0.6661, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 1.1727062451811874, | |
| "grad_norm": 0.030523747205734253, | |
| "learning_rate": 7.491168167559209e-05, | |
| "loss": 0.5446, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 1.174248265227448, | |
| "grad_norm": 0.03329429030418396, | |
| "learning_rate": 7.467342596084179e-05, | |
| "loss": 0.6319, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 1.1757902852737085, | |
| "grad_norm": 0.0314452238380909, | |
| "learning_rate": 7.443532376950688e-05, | |
| "loss": 0.6161, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 1.1773323053199691, | |
| "grad_norm": 0.03715851530432701, | |
| "learning_rate": 7.419737654490379e-05, | |
| "loss": 0.7136, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 1.1788743253662297, | |
| "grad_norm": 0.03489915654063225, | |
| "learning_rate": 7.395958572940966e-05, | |
| "loss": 0.6072, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 1.1804163454124903, | |
| "grad_norm": 0.0346875824034214, | |
| "learning_rate": 7.37219527644535e-05, | |
| "loss": 0.5942, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 1.1819583654587509, | |
| "grad_norm": 0.03557261452078819, | |
| "learning_rate": 7.34844790905074e-05, | |
| "loss": 0.6739, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 1.1835003855050115, | |
| "grad_norm": 0.03237386420369148, | |
| "learning_rate": 7.324716614707793e-05, | |
| "loss": 0.7092, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 1.1850424055512723, | |
| "grad_norm": 0.035937052220106125, | |
| "learning_rate": 7.301001537269736e-05, | |
| "loss": 0.6396, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 1.1865844255975329, | |
| "grad_norm": 0.03329618275165558, | |
| "learning_rate": 7.277302820491491e-05, | |
| "loss": 0.6302, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 1.1881264456437934, | |
| "grad_norm": 0.03639035299420357, | |
| "learning_rate": 7.25362060802881e-05, | |
| "loss": 0.6404, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 1.189668465690054, | |
| "grad_norm": 0.03558572009205818, | |
| "learning_rate": 7.229955043437391e-05, | |
| "loss": 0.5638, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 1.1912104857363146, | |
| "grad_norm": 0.03477177023887634, | |
| "learning_rate": 7.206306270172019e-05, | |
| "loss": 0.5829, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 1.1927525057825752, | |
| "grad_norm": 0.03548658639192581, | |
| "learning_rate": 7.182674431585704e-05, | |
| "loss": 0.5756, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 1.1942945258288358, | |
| "grad_norm": 0.03554500639438629, | |
| "learning_rate": 7.159059670928791e-05, | |
| "loss": 0.6173, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 1.1958365458750964, | |
| "grad_norm": 0.03374260663986206, | |
| "learning_rate": 7.135462131348107e-05, | |
| "loss": 0.6626, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 1.197378565921357, | |
| "grad_norm": 0.034156035631895065, | |
| "learning_rate": 7.111881955886093e-05, | |
| "loss": 0.6719, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 1.1989205859676175, | |
| "grad_norm": 0.03379127383232117, | |
| "learning_rate": 7.088319287479919e-05, | |
| "loss": 0.5903, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 1.200462606013878, | |
| "grad_norm": 0.035499654710292816, | |
| "learning_rate": 7.064774268960653e-05, | |
| "loss": 0.6262, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 1.2020046260601387, | |
| "grad_norm": 0.03328956291079521, | |
| "learning_rate": 7.041247043052357e-05, | |
| "loss": 0.5538, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 1.2035466461063993, | |
| "grad_norm": 0.03272480517625809, | |
| "learning_rate": 7.017737752371244e-05, | |
| "loss": 0.6044, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 1.20508866615266, | |
| "grad_norm": 0.036613889038562775, | |
| "learning_rate": 6.994246539424808e-05, | |
| "loss": 0.6649, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 1.2066306861989207, | |
| "grad_norm": 0.03719131276011467, | |
| "learning_rate": 6.970773546610958e-05, | |
| "loss": 0.6586, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 1.2081727062451812, | |
| "grad_norm": 0.0346304327249527, | |
| "learning_rate": 6.947318916217168e-05, | |
| "loss": 0.657, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 1.2097147262914418, | |
| "grad_norm": 0.04230980947613716, | |
| "learning_rate": 6.923882790419585e-05, | |
| "loss": 0.8137, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 1.2112567463377024, | |
| "grad_norm": 0.036169420927762985, | |
| "learning_rate": 6.900465311282201e-05, | |
| "loss": 0.6562, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 1.212798766383963, | |
| "grad_norm": 0.03368053212761879, | |
| "learning_rate": 6.87706662075597e-05, | |
| "loss": 0.6302, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 1.2143407864302236, | |
| "grad_norm": 0.028687801212072372, | |
| "learning_rate": 6.853686860677949e-05, | |
| "loss": 0.5765, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 1.2158828064764842, | |
| "grad_norm": 0.03682614117860794, | |
| "learning_rate": 6.830326172770463e-05, | |
| "loss": 0.728, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 1.2174248265227448, | |
| "grad_norm": 0.032425619661808014, | |
| "learning_rate": 6.806984698640202e-05, | |
| "loss": 0.6618, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 1.2189668465690053, | |
| "grad_norm": 0.04002368077635765, | |
| "learning_rate": 6.783662579777401e-05, | |
| "loss": 0.6823, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 1.220508866615266, | |
| "grad_norm": 0.04177991300821304, | |
| "learning_rate": 6.760359957554963e-05, | |
| "loss": 0.6711, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 1.2220508866615265, | |
| "grad_norm": 0.0367995947599411, | |
| "learning_rate": 6.737076973227614e-05, | |
| "loss": 0.7178, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 1.223592906707787, | |
| "grad_norm": 0.037920210510492325, | |
| "learning_rate": 6.713813767931031e-05, | |
| "loss": 0.6577, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 1.225134926754048, | |
| "grad_norm": 0.03627100959420204, | |
| "learning_rate": 6.690570482681002e-05, | |
| "loss": 0.6079, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 1.2266769468003085, | |
| "grad_norm": 0.034207917749881744, | |
| "learning_rate": 6.667347258372559e-05, | |
| "loss": 0.6746, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 1.228218966846569, | |
| "grad_norm": 0.037893664091825485, | |
| "learning_rate": 6.644144235779132e-05, | |
| "loss": 0.6847, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 1.2297609868928296, | |
| "grad_norm": 0.039413537830114365, | |
| "learning_rate": 6.620961555551697e-05, | |
| "loss": 0.6923, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 1.2313030069390902, | |
| "grad_norm": 0.03504036366939545, | |
| "learning_rate": 6.597799358217915e-05, | |
| "loss": 0.6434, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 1.2328450269853508, | |
| "grad_norm": 0.036619458347558975, | |
| "learning_rate": 6.574657784181287e-05, | |
| "loss": 0.6321, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 1.2343870470316114, | |
| "grad_norm": 0.03508542478084564, | |
| "learning_rate": 6.551536973720298e-05, | |
| "loss": 0.6873, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 1.235929067077872, | |
| "grad_norm": 0.038882650434970856, | |
| "learning_rate": 6.52843706698757e-05, | |
| "loss": 0.7239, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 1.2374710871241326, | |
| "grad_norm": 0.03949695825576782, | |
| "learning_rate": 6.505358204009017e-05, | |
| "loss": 0.7489, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 1.2390131071703931, | |
| "grad_norm": 0.03820425644516945, | |
| "learning_rate": 6.482300524682985e-05, | |
| "loss": 0.6727, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 1.2405551272166537, | |
| "grad_norm": 0.033910010010004044, | |
| "learning_rate": 6.459264168779415e-05, | |
| "loss": 0.6468, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 1.2420971472629145, | |
| "grad_norm": 0.03440403565764427, | |
| "learning_rate": 6.436249275938977e-05, | |
| "loss": 0.6257, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 1.243639167309175, | |
| "grad_norm": 0.033627837896347046, | |
| "learning_rate": 6.413255985672262e-05, | |
| "loss": 0.6593, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 1.2451811873554357, | |
| "grad_norm": 0.04215480387210846, | |
| "learning_rate": 6.390284437358888e-05, | |
| "loss": 0.6577, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 1.2467232074016963, | |
| "grad_norm": 0.04215266555547714, | |
| "learning_rate": 6.367334770246693e-05, | |
| "loss": 0.618, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 1.2482652274479569, | |
| "grad_norm": 0.034282535314559937, | |
| "learning_rate": 6.344407123450866e-05, | |
| "loss": 0.5776, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 1.2498072474942175, | |
| "grad_norm": 0.0348060242831707, | |
| "learning_rate": 6.321501635953124e-05, | |
| "loss": 0.6657, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 1.251349267540478, | |
| "grad_norm": 0.04382369667291641, | |
| "learning_rate": 6.298618446600856e-05, | |
| "loss": 0.6688, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 1.2528912875867386, | |
| "grad_norm": 0.03689052164554596, | |
| "learning_rate": 6.275757694106286e-05, | |
| "loss": 0.7098, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 1.2544333076329992, | |
| "grad_norm": 0.03462991490960121, | |
| "learning_rate": 6.252919517045625e-05, | |
| "loss": 0.6812, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 1.2559753276792598, | |
| "grad_norm": 0.03429633378982544, | |
| "learning_rate": 6.230104053858247e-05, | |
| "loss": 0.6139, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 1.2575173477255204, | |
| "grad_norm": 0.041576385498046875, | |
| "learning_rate": 6.207311442845834e-05, | |
| "loss": 0.6633, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 1.259059367771781, | |
| "grad_norm": 0.03809819743037224, | |
| "learning_rate": 6.184541822171545e-05, | |
| "loss": 0.6521, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 1.2606013878180415, | |
| "grad_norm": 0.03564080595970154, | |
| "learning_rate": 6.161795329859183e-05, | |
| "loss": 0.6735, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 1.2621434078643023, | |
| "grad_norm": 0.038920801132917404, | |
| "learning_rate": 6.13907210379234e-05, | |
| "loss": 0.752, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 1.2636854279105627, | |
| "grad_norm": 0.03850522264838219, | |
| "learning_rate": 6.116372281713581e-05, | |
| "loss": 0.6547, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 1.2652274479568235, | |
| "grad_norm": 0.03561043366789818, | |
| "learning_rate": 6.093696001223609e-05, | |
| "loss": 0.6592, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 1.266769468003084, | |
| "grad_norm": 0.03549296036362648, | |
| "learning_rate": 6.071043399780412e-05, | |
| "loss": 0.6246, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 1.2683114880493447, | |
| "grad_norm": 0.03841027989983559, | |
| "learning_rate": 6.048414614698448e-05, | |
| "loss": 0.6699, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 1.2698535080956053, | |
| "grad_norm": 0.0381116084754467, | |
| "learning_rate": 6.0258097831478024e-05, | |
| "loss": 0.6542, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 1.2713955281418658, | |
| "grad_norm": 0.03543659672141075, | |
| "learning_rate": 6.00322904215336e-05, | |
| "loss": 0.699, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 1.2729375481881264, | |
| "grad_norm": 0.03378836065530777, | |
| "learning_rate": 5.98067252859398e-05, | |
| "loss": 0.6506, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 1.274479568234387, | |
| "grad_norm": 0.03210572898387909, | |
| "learning_rate": 5.95814037920166e-05, | |
| "loss": 0.5834, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 1.2760215882806476, | |
| "grad_norm": 0.034539416432380676, | |
| "learning_rate": 5.935632730560702e-05, | |
| "loss": 0.6355, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 1.2775636083269082, | |
| "grad_norm": 0.03431408852338791, | |
| "learning_rate": 5.913149719106896e-05, | |
| "loss": 0.6222, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 1.2791056283731688, | |
| "grad_norm": 0.03308793902397156, | |
| "learning_rate": 5.89069148112668e-05, | |
| "loss": 0.5993, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 1.2806476484194294, | |
| "grad_norm": 0.0353439524769783, | |
| "learning_rate": 5.8682581527563366e-05, | |
| "loss": 0.6453, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 1.2821896684656902, | |
| "grad_norm": 0.03516675531864166, | |
| "learning_rate": 5.845849869981137e-05, | |
| "loss": 0.6377, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 1.2837316885119505, | |
| "grad_norm": 0.03784884139895439, | |
| "learning_rate": 5.823466768634538e-05, | |
| "loss": 0.6936, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 1.2852737085582113, | |
| "grad_norm": 0.03621688485145569, | |
| "learning_rate": 5.801108984397354e-05, | |
| "loss": 0.6495, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 1.286815728604472, | |
| "grad_norm": 0.0380130410194397, | |
| "learning_rate": 5.7787766527969354e-05, | |
| "loss": 0.578, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 1.2883577486507325, | |
| "grad_norm": 0.045173536986112595, | |
| "learning_rate": 5.7564699092063345e-05, | |
| "loss": 0.6438, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 1.289899768696993, | |
| "grad_norm": 0.03988794982433319, | |
| "learning_rate": 5.7341888888435126e-05, | |
| "loss": 0.7357, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 1.2914417887432537, | |
| "grad_norm": 0.039014436304569244, | |
| "learning_rate": 5.7119337267704866e-05, | |
| "loss": 0.7272, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 1.2929838087895142, | |
| "grad_norm": 0.03429098427295685, | |
| "learning_rate": 5.689704557892528e-05, | |
| "loss": 0.5897, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 1.2945258288357748, | |
| "grad_norm": 0.036472566425800323, | |
| "learning_rate": 5.6675015169573655e-05, | |
| "loss": 0.6074, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 1.2960678488820354, | |
| "grad_norm": 0.033156655728816986, | |
| "learning_rate": 5.6453247385543206e-05, | |
| "loss": 0.555, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 1.297609868928296, | |
| "grad_norm": 0.03926177695393562, | |
| "learning_rate": 5.623174357113528e-05, | |
| "loss": 0.8, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 1.2991518889745568, | |
| "grad_norm": 0.041468918323516846, | |
| "learning_rate": 5.6010505069051145e-05, | |
| "loss": 0.7158, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 1.3006939090208172, | |
| "grad_norm": 0.034991636872291565, | |
| "learning_rate": 5.578953322038372e-05, | |
| "loss": 0.5738, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 1.302235929067078, | |
| "grad_norm": 0.04295094683766365, | |
| "learning_rate": 5.5568829364609664e-05, | |
| "loss": 0.6652, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 1.3037779491133386, | |
| "grad_norm": 0.03196045756340027, | |
| "learning_rate": 5.5348394839580986e-05, | |
| "loss": 0.6114, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 1.3053199691595991, | |
| "grad_norm": 0.03417590633034706, | |
| "learning_rate": 5.51282309815172e-05, | |
| "loss": 0.6307, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 1.3068619892058597, | |
| "grad_norm": 0.033190734684467316, | |
| "learning_rate": 5.4908339124997e-05, | |
| "loss": 0.6327, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 1.3084040092521203, | |
| "grad_norm": 0.03339439630508423, | |
| "learning_rate": 5.468872060295034e-05, | |
| "loss": 0.6274, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 1.3099460292983809, | |
| "grad_norm": 0.038281556218862534, | |
| "learning_rate": 5.446937674665034e-05, | |
| "loss": 0.7243, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 1.3114880493446415, | |
| "grad_norm": 0.03810643032193184, | |
| "learning_rate": 5.4250308885705056e-05, | |
| "loss": 0.7178, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 1.313030069390902, | |
| "grad_norm": 0.03502066433429718, | |
| "learning_rate": 5.403151834804951e-05, | |
| "loss": 0.6897, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 1.3145720894371626, | |
| "grad_norm": 0.034795403480529785, | |
| "learning_rate": 5.381300645993779e-05, | |
| "loss": 0.6483, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 1.3161141094834232, | |
| "grad_norm": 0.035580191761255264, | |
| "learning_rate": 5.3594774545934825e-05, | |
| "loss": 0.6123, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 1.3176561295296838, | |
| "grad_norm": 0.03703007102012634, | |
| "learning_rate": 5.337682392890833e-05, | |
| "loss": 0.7408, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 1.3191981495759446, | |
| "grad_norm": 0.06035947799682617, | |
| "learning_rate": 5.3159155930021e-05, | |
| "loss": 0.6696, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 1.320740169622205, | |
| "grad_norm": 0.03433072566986084, | |
| "learning_rate": 5.2941771868722265e-05, | |
| "loss": 0.5911, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 1.3222821896684658, | |
| "grad_norm": 0.037705931812524796, | |
| "learning_rate": 5.27246730627404e-05, | |
| "loss": 0.6841, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 1.3238242097147264, | |
| "grad_norm": 0.03678227216005325, | |
| "learning_rate": 5.250786082807462e-05, | |
| "loss": 0.7193, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 1.325366229760987, | |
| "grad_norm": 0.039237961173057556, | |
| "learning_rate": 5.2291336478986964e-05, | |
| "loss": 0.6715, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 1.3269082498072475, | |
| "grad_norm": 0.036990754306316376, | |
| "learning_rate": 5.207510132799436e-05, | |
| "loss": 0.6092, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 1.3284502698535081, | |
| "grad_norm": 0.03487098589539528, | |
| "learning_rate": 5.1859156685860656e-05, | |
| "loss": 0.6486, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 1.3299922898997687, | |
| "grad_norm": 0.04065219685435295, | |
| "learning_rate": 5.1643503861588805e-05, | |
| "loss": 0.6473, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 1.3315343099460293, | |
| "grad_norm": 0.03509150445461273, | |
| "learning_rate": 5.142814416241282e-05, | |
| "loss": 0.6324, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 1.3330763299922899, | |
| "grad_norm": 0.03709571436047554, | |
| "learning_rate": 5.121307889378975e-05, | |
| "loss": 0.6533, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 1.3346183500385504, | |
| "grad_norm": 0.03225082904100418, | |
| "learning_rate": 5.099830935939203e-05, | |
| "loss": 0.6267, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 1.336160370084811, | |
| "grad_norm": 0.042506713420152664, | |
| "learning_rate": 5.078383686109926e-05, | |
| "loss": 0.7575, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 1.3377023901310716, | |
| "grad_norm": 0.03380179777741432, | |
| "learning_rate": 5.056966269899069e-05, | |
| "loss": 0.6129, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 1.3392444101773324, | |
| "grad_norm": 0.04255492612719536, | |
| "learning_rate": 5.0355788171336914e-05, | |
| "loss": 0.6912, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 1.3407864302235928, | |
| "grad_norm": 0.03951529785990715, | |
| "learning_rate": 5.01422145745924e-05, | |
| "loss": 0.62, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 1.3423284502698536, | |
| "grad_norm": 0.0411263071000576, | |
| "learning_rate": 4.992894320338727e-05, | |
| "loss": 0.667, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 1.3438704703161142, | |
| "grad_norm": 0.03915927931666374, | |
| "learning_rate": 4.971597535051977e-05, | |
| "loss": 0.6953, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 1.3454124903623748, | |
| "grad_norm": 0.04352157562971115, | |
| "learning_rate": 4.950331230694829e-05, | |
| "loss": 0.7338, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 1.3469545104086353, | |
| "grad_norm": 0.0368880033493042, | |
| "learning_rate": 4.929095536178346e-05, | |
| "loss": 0.6523, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 1.348496530454896, | |
| "grad_norm": 0.03794132545590401, | |
| "learning_rate": 4.907890580228042e-05, | |
| "loss": 0.6702, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 1.3500385505011565, | |
| "grad_norm": 0.03402203321456909, | |
| "learning_rate": 4.886716491383111e-05, | |
| "loss": 0.6168, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 1.351580570547417, | |
| "grad_norm": 0.03810921311378479, | |
| "learning_rate": 4.8655733979956255e-05, | |
| "loss": 0.6744, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 1.3531225905936777, | |
| "grad_norm": 0.034791238605976105, | |
| "learning_rate": 4.844461428229782e-05, | |
| "loss": 0.6058, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 1.3546646106399383, | |
| "grad_norm": 0.03566417098045349, | |
| "learning_rate": 4.8233807100611114e-05, | |
| "loss": 0.5803, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 1.3562066306861988, | |
| "grad_norm": 0.04459409415721893, | |
| "learning_rate": 4.802331371275702e-05, | |
| "loss": 0.6416, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 1.3577486507324594, | |
| "grad_norm": 0.03633615002036095, | |
| "learning_rate": 4.781313539469423e-05, | |
| "loss": 0.6412, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 1.3592906707787202, | |
| "grad_norm": 0.03890186548233032, | |
| "learning_rate": 4.760327342047167e-05, | |
| "loss": 0.6441, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 1.3608326908249806, | |
| "grad_norm": 0.03216012567281723, | |
| "learning_rate": 4.739372906222066e-05, | |
| "loss": 0.6074, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 1.3623747108712414, | |
| "grad_norm": 0.03994244709610939, | |
| "learning_rate": 4.718450359014712e-05, | |
| "loss": 0.6768, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 1.363916730917502, | |
| "grad_norm": 0.03384639695286751, | |
| "learning_rate": 4.697559827252398e-05, | |
| "loss": 0.5744, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 1.3654587509637626, | |
| "grad_norm": 0.04195882007479668, | |
| "learning_rate": 4.676701437568354e-05, | |
| "loss": 0.6136, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 1.3670007710100232, | |
| "grad_norm": 0.035277076065540314, | |
| "learning_rate": 4.655875316400974e-05, | |
| "loss": 0.591, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 1.3685427910562837, | |
| "grad_norm": 0.03624827414751053, | |
| "learning_rate": 4.6350815899930336e-05, | |
| "loss": 0.6236, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 1.3700848111025443, | |
| "grad_norm": 0.03686223924160004, | |
| "learning_rate": 4.614320384390959e-05, | |
| "loss": 0.6283, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 1.371626831148805, | |
| "grad_norm": 0.04274080693721771, | |
| "learning_rate": 4.593591825444028e-05, | |
| "loss": 0.6367, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 1.3731688511950655, | |
| "grad_norm": 0.03906931355595589, | |
| "learning_rate": 4.5728960388036214e-05, | |
| "loss": 0.6403, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 1.374710871241326, | |
| "grad_norm": 0.036612290889024734, | |
| "learning_rate": 4.5522331499224835e-05, | |
| "loss": 0.6149, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 1.3762528912875869, | |
| "grad_norm": 0.03672497719526291, | |
| "learning_rate": 4.531603284053919e-05, | |
| "loss": 0.6335, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 1.3777949113338472, | |
| "grad_norm": 0.03735297545790672, | |
| "learning_rate": 4.511006566251059e-05, | |
| "loss": 0.6525, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 1.379336931380108, | |
| "grad_norm": 0.042314548045396805, | |
| "learning_rate": 4.490443121366105e-05, | |
| "loss": 0.6172, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 1.3808789514263684, | |
| "grad_norm": 0.035961080342531204, | |
| "learning_rate": 4.469913074049567e-05, | |
| "loss": 0.5667, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 1.3824209714726292, | |
| "grad_norm": 0.03719279170036316, | |
| "learning_rate": 4.449416548749503e-05, | |
| "loss": 0.6286, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 1.3839629915188898, | |
| "grad_norm": 0.035268232226371765, | |
| "learning_rate": 4.4289536697107636e-05, | |
| "loss": 0.6249, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 1.3855050115651504, | |
| "grad_norm": 0.03824489563703537, | |
| "learning_rate": 4.4085245609742595e-05, | |
| "loss": 0.6263, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 1.387047031611411, | |
| "grad_norm": 0.04036892205476761, | |
| "learning_rate": 4.388129346376178e-05, | |
| "loss": 0.6698, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 1.3885890516576715, | |
| "grad_norm": 0.03689991310238838, | |
| "learning_rate": 4.367768149547256e-05, | |
| "loss": 0.6192, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 1.3901310717039321, | |
| "grad_norm": 0.03646843880414963, | |
| "learning_rate": 4.34744109391203e-05, | |
| "loss": 0.6822, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 1.3916730917501927, | |
| "grad_norm": 0.036306463181972504, | |
| "learning_rate": 4.3271483026880686e-05, | |
| "loss": 0.7123, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 1.3932151117964533, | |
| "grad_norm": 0.034124668687582016, | |
| "learning_rate": 4.3068898988852404e-05, | |
| "loss": 0.6119, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 1.3947571318427139, | |
| "grad_norm": 0.041610926389694214, | |
| "learning_rate": 4.286666005304971e-05, | |
| "loss": 0.6478, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 1.3962991518889747, | |
| "grad_norm": 0.04072918742895126, | |
| "learning_rate": 4.266476744539496e-05, | |
| "loss": 0.6173, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 1.397841171935235, | |
| "grad_norm": 0.04097921401262283, | |
| "learning_rate": 4.246322238971105e-05, | |
| "loss": 0.6663, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 1.3993831919814959, | |
| "grad_norm": 0.04002819582819939, | |
| "learning_rate": 4.226202610771419e-05, | |
| "loss": 0.6175, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 1.4009252120277564, | |
| "grad_norm": 0.036188431084156036, | |
| "learning_rate": 4.206117981900636e-05, | |
| "loss": 0.6083, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 1.402467232074017, | |
| "grad_norm": 0.044372767210006714, | |
| "learning_rate": 4.186068474106802e-05, | |
| "loss": 0.6594, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 1.4040092521202776, | |
| "grad_norm": 0.044394172728061676, | |
| "learning_rate": 4.16605420892506e-05, | |
| "loss": 0.6764, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 1.4055512721665382, | |
| "grad_norm": 0.03883661702275276, | |
| "learning_rate": 4.146075307676932e-05, | |
| "loss": 0.6484, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 1.4070932922127988, | |
| "grad_norm": 0.0384899266064167, | |
| "learning_rate": 4.126131891469561e-05, | |
| "loss": 0.6472, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 1.4086353122590594, | |
| "grad_norm": 0.03456205874681473, | |
| "learning_rate": 4.106224081194989e-05, | |
| "loss": 0.5966, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 1.41017733230532, | |
| "grad_norm": 0.038852810859680176, | |
| "learning_rate": 4.086351997529441e-05, | |
| "loss": 0.6582, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 1.4117193523515805, | |
| "grad_norm": 0.03524342551827431, | |
| "learning_rate": 4.0665157609325565e-05, | |
| "loss": 0.6349, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 1.413261372397841, | |
| "grad_norm": 0.039776511490345, | |
| "learning_rate": 4.046715491646683e-05, | |
| "loss": 0.6392, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 1.4148033924441017, | |
| "grad_norm": 0.03844306245446205, | |
| "learning_rate": 4.026951309696152e-05, | |
| "loss": 0.6372, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 1.4163454124903625, | |
| "grad_norm": 0.03955772891640663, | |
| "learning_rate": 4.007223334886531e-05, | |
| "loss": 0.6827, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 1.4178874325366229, | |
| "grad_norm": 0.03288029879331589, | |
| "learning_rate": 3.9875316868039206e-05, | |
| "loss": 0.556, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 1.4194294525828837, | |
| "grad_norm": 0.03645135462284088, | |
| "learning_rate": 3.9678764848142024e-05, | |
| "loss": 0.6617, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 1.4209714726291442, | |
| "grad_norm": 0.03935571014881134, | |
| "learning_rate": 3.948257848062351e-05, | |
| "loss": 0.6806, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 1.4225134926754048, | |
| "grad_norm": 0.03671710938215256, | |
| "learning_rate": 3.9286758954716696e-05, | |
| "loss": 0.6654, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 1.4240555127216654, | |
| "grad_norm": 0.03780873119831085, | |
| "learning_rate": 3.909130745743108e-05, | |
| "loss": 0.6481, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 1.425597532767926, | |
| "grad_norm": 0.035091038793325424, | |
| "learning_rate": 3.889622517354523e-05, | |
| "loss": 0.6611, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 1.4271395528141866, | |
| "grad_norm": 0.0360507071018219, | |
| "learning_rate": 3.870151328559956e-05, | |
| "loss": 0.6477, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 1.4286815728604472, | |
| "grad_norm": 0.03495753929018974, | |
| "learning_rate": 3.850717297388926e-05, | |
| "loss": 0.5893, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 1.4302235929067078, | |
| "grad_norm": 0.03431479260325432, | |
| "learning_rate": 3.8313205416457164e-05, | |
| "loss": 0.6049, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 1.4317656129529683, | |
| "grad_norm": 0.03628220409154892, | |
| "learning_rate": 3.811961178908657e-05, | |
| "loss": 0.5614, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 1.433307632999229, | |
| "grad_norm": 0.0385250449180603, | |
| "learning_rate": 3.7926393265294017e-05, | |
| "loss": 0.6891, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 1.4348496530454895, | |
| "grad_norm": 0.03749658167362213, | |
| "learning_rate": 3.773355101632235e-05, | |
| "loss": 0.6655, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 1.4363916730917503, | |
| "grad_norm": 0.041382692754268646, | |
| "learning_rate": 3.75410862111335e-05, | |
| "loss": 0.7552, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 1.4379336931380107, | |
| "grad_norm": 0.040209271013736725, | |
| "learning_rate": 3.734900001640135e-05, | |
| "loss": 0.7837, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 1.4394757131842715, | |
| "grad_norm": 0.034024372696876526, | |
| "learning_rate": 3.715729359650486e-05, | |
| "loss": 0.5879, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 1.441017733230532, | |
| "grad_norm": 0.039757728576660156, | |
| "learning_rate": 3.6965968113520864e-05, | |
| "loss": 0.7303, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 1.4425597532767926, | |
| "grad_norm": 0.036601316183805466, | |
| "learning_rate": 3.677502472721699e-05, | |
| "loss": 0.635, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 1.4441017733230532, | |
| "grad_norm": 0.03675416484475136, | |
| "learning_rate": 3.658446459504471e-05, | |
| "loss": 0.6311, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 1.4456437933693138, | |
| "grad_norm": 0.03397215157747269, | |
| "learning_rate": 3.6394288872132335e-05, | |
| "loss": 0.5638, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 1.4471858134155744, | |
| "grad_norm": 0.03988713398575783, | |
| "learning_rate": 3.620449871127801e-05, | |
| "loss": 0.639, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 1.448727833461835, | |
| "grad_norm": 0.036214519292116165, | |
| "learning_rate": 3.6015095262942574e-05, | |
| "loss": 0.6345, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 1.4502698535080956, | |
| "grad_norm": 0.03787151351571083, | |
| "learning_rate": 3.58260796752429e-05, | |
| "loss": 0.7008, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 1.4518118735543561, | |
| "grad_norm": 0.03837287053465843, | |
| "learning_rate": 3.5637453093944516e-05, | |
| "loss": 0.6619, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 1.4533538936006167, | |
| "grad_norm": 0.03574013337492943, | |
| "learning_rate": 3.54492166624551e-05, | |
| "loss": 0.5958, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 1.4548959136468773, | |
| "grad_norm": 0.04165033996105194, | |
| "learning_rate": 3.5261371521817244e-05, | |
| "loss": 0.6063, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 1.4564379336931381, | |
| "grad_norm": 0.036275554448366165, | |
| "learning_rate": 3.507391881070161e-05, | |
| "loss": 0.6585, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 1.4579799537393985, | |
| "grad_norm": 0.04543803632259369, | |
| "learning_rate": 3.4886859665400075e-05, | |
| "loss": 0.7924, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 1.4595219737856593, | |
| "grad_norm": 0.04047508165240288, | |
| "learning_rate": 3.470019521981882e-05, | |
| "loss": 0.7316, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 1.4610639938319199, | |
| "grad_norm": 0.042151421308517456, | |
| "learning_rate": 3.45139266054715e-05, | |
| "loss": 0.6866, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 1.4626060138781805, | |
| "grad_norm": 0.03542311489582062, | |
| "learning_rate": 3.432805495147227e-05, | |
| "loss": 0.6577, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 1.464148033924441, | |
| "grad_norm": 0.03901368007063866, | |
| "learning_rate": 3.414258138452898e-05, | |
| "loss": 0.6487, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 1.4656900539707016, | |
| "grad_norm": 0.03694869950413704, | |
| "learning_rate": 3.395750702893651e-05, | |
| "loss": 0.7043, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 1.4672320740169622, | |
| "grad_norm": 0.038198187947273254, | |
| "learning_rate": 3.377283300656967e-05, | |
| "loss": 0.6513, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 1.4687740940632228, | |
| "grad_norm": 0.03860288858413696, | |
| "learning_rate": 3.358856043687666e-05, | |
| "loss": 0.6888, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 1.4703161141094834, | |
| "grad_norm": 0.04178981855511665, | |
| "learning_rate": 3.340469043687213e-05, | |
| "loss": 0.639, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 1.471858134155744, | |
| "grad_norm": 0.036654602736234665, | |
| "learning_rate": 3.322122412113047e-05, | |
| "loss": 0.6347, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 1.4734001542020048, | |
| "grad_norm": 0.0347287580370903, | |
| "learning_rate": 3.303816260177894e-05, | |
| "loss": 0.4882, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 1.4749421742482651, | |
| "grad_norm": 0.037516217678785324, | |
| "learning_rate": 3.285550698849117e-05, | |
| "loss": 0.6217, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 1.476484194294526, | |
| "grad_norm": 0.03708701953291893, | |
| "learning_rate": 3.267325838848023e-05, | |
| "loss": 0.6542, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 1.4780262143407863, | |
| "grad_norm": 0.03827507793903351, | |
| "learning_rate": 3.2491417906491914e-05, | |
| "loss": 0.5595, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 1.479568234387047, | |
| "grad_norm": 0.03934675455093384, | |
| "learning_rate": 3.230998664479823e-05, | |
| "loss": 0.6574, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 1.4811102544333077, | |
| "grad_norm": 0.0406733900308609, | |
| "learning_rate": 3.212896570319045e-05, | |
| "loss": 0.645, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 1.4826522744795683, | |
| "grad_norm": 0.040251266211271286, | |
| "learning_rate": 3.194835617897273e-05, | |
| "loss": 0.6699, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 1.4841942945258288, | |
| "grad_norm": 0.041038792580366135, | |
| "learning_rate": 3.176815916695518e-05, | |
| "loss": 0.6631, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 1.4857363145720894, | |
| "grad_norm": 0.03457193076610565, | |
| "learning_rate": 3.158837575944751e-05, | |
| "loss": 0.6236, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 1.48727833461835, | |
| "grad_norm": 0.03977598994970322, | |
| "learning_rate": 3.1409007046252115e-05, | |
| "loss": 0.6281, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 1.4888203546646106, | |
| "grad_norm": 0.03788241744041443, | |
| "learning_rate": 3.123005411465766e-05, | |
| "loss": 0.6207, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 1.4903623747108712, | |
| "grad_norm": 0.038311246782541275, | |
| "learning_rate": 3.105151804943256e-05, | |
| "loss": 0.5844, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 1.4919043947571318, | |
| "grad_norm": 0.03648220747709274, | |
| "learning_rate": 3.087339993281816e-05, | |
| "loss": 0.5752, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 1.4934464148033926, | |
| "grad_norm": 0.03483033925294876, | |
| "learning_rate": 3.06957008445223e-05, | |
| "loss": 0.5375, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 1.494988434849653, | |
| "grad_norm": 0.03250645473599434, | |
| "learning_rate": 3.051842186171284e-05, | |
| "loss": 0.5872, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 1.4965304548959137, | |
| "grad_norm": 0.04050235077738762, | |
| "learning_rate": 3.0341564059011084e-05, | |
| "loss": 0.6882, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 1.4980724749421743, | |
| "grad_norm": 0.03309236839413643, | |
| "learning_rate": 3.0165128508485164e-05, | |
| "loss": 0.5716, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 1.499614494988435, | |
| "grad_norm": 0.03873175382614136, | |
| "learning_rate": 2.9989116279643636e-05, | |
| "loss": 0.6187, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 1.5011565150346955, | |
| "grad_norm": 0.037314094603061676, | |
| "learning_rate": 2.9813528439429073e-05, | |
| "loss": 0.6247, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 1.502698535080956, | |
| "grad_norm": 0.039977092295885086, | |
| "learning_rate": 2.9638366052211386e-05, | |
| "loss": 0.6382, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 1.5042405551272167, | |
| "grad_norm": 0.03529144823551178, | |
| "learning_rate": 2.9463630179781587e-05, | |
| "loss": 0.5847, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 1.5057825751734772, | |
| "grad_norm": 0.07699840515851974, | |
| "learning_rate": 2.9289321881345254e-05, | |
| "loss": 0.6286, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 1.5073245952197378, | |
| "grad_norm": 0.037752117961645126, | |
| "learning_rate": 2.9115442213516075e-05, | |
| "loss": 0.5993, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 1.5088666152659984, | |
| "grad_norm": 0.03429201990365982, | |
| "learning_rate": 2.8941992230309478e-05, | |
| "loss": 0.6273, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 1.5104086353122592, | |
| "grad_norm": 0.04487847909331322, | |
| "learning_rate": 2.87689729831363e-05, | |
| "loss": 0.7145, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 1.5119506553585196, | |
| "grad_norm": 0.03917218744754791, | |
| "learning_rate": 2.8596385520796366e-05, | |
| "loss": 0.6739, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 1.5134926754047804, | |
| "grad_norm": 0.03573921322822571, | |
| "learning_rate": 2.842423088947205e-05, | |
| "loss": 0.6296, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 1.5150346954510407, | |
| "grad_norm": 0.03267597034573555, | |
| "learning_rate": 2.825251013272212e-05, | |
| "loss": 0.557, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 1.5165767154973016, | |
| "grad_norm": 0.03839464113116264, | |
| "learning_rate": 2.8081224291475215e-05, | |
| "loss": 0.6498, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 1.518118735543562, | |
| "grad_norm": 0.03472873941063881, | |
| "learning_rate": 2.7910374404023632e-05, | |
| "loss": 0.63, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 1.5196607555898227, | |
| "grad_norm": 0.04181867465376854, | |
| "learning_rate": 2.7739961506017075e-05, | |
| "loss": 0.6386, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 1.5212027756360833, | |
| "grad_norm": 0.03752240538597107, | |
| "learning_rate": 2.7569986630456333e-05, | |
| "loss": 0.6171, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 1.5227447956823439, | |
| "grad_norm": 0.04248370602726936, | |
| "learning_rate": 2.7400450807686938e-05, | |
| "loss": 0.6596, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 1.5242868157286045, | |
| "grad_norm": 0.038101982325315475, | |
| "learning_rate": 2.7231355065392995e-05, | |
| "loss": 0.5733, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 1.525828835774865, | |
| "grad_norm": 0.03386906534433365, | |
| "learning_rate": 2.7062700428591083e-05, | |
| "loss": 0.5885, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 1.5273708558211256, | |
| "grad_norm": 0.03431142494082451, | |
| "learning_rate": 2.6894487919623768e-05, | |
| "loss": 0.6046, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 1.5289128758673862, | |
| "grad_norm": 0.03364618495106697, | |
| "learning_rate": 2.6726718558153552e-05, | |
| "loss": 0.5804, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 1.530454895913647, | |
| "grad_norm": 0.03773849830031395, | |
| "learning_rate": 2.6559393361156803e-05, | |
| "loss": 0.671, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 1.5319969159599074, | |
| "grad_norm": 0.040521420538425446, | |
| "learning_rate": 2.6392513342917325e-05, | |
| "loss": 0.6589, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 1.5335389360061682, | |
| "grad_norm": 0.0340166799724102, | |
| "learning_rate": 2.6226079515020507e-05, | |
| "loss": 0.5052, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 1.5350809560524286, | |
| "grad_norm": 0.039063796401023865, | |
| "learning_rate": 2.6060092886346887e-05, | |
| "loss": 0.6083, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 1.5366229760986894, | |
| "grad_norm": 0.035876594483852386, | |
| "learning_rate": 2.5894554463066355e-05, | |
| "loss": 0.5693, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 1.5381649961449497, | |
| "grad_norm": 0.039263173937797546, | |
| "learning_rate": 2.5729465248631735e-05, | |
| "loss": 0.6721, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 1.5397070161912105, | |
| "grad_norm": 0.03354692459106445, | |
| "learning_rate": 2.5564826243772966e-05, | |
| "loss": 0.5877, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 1.5412490362374711, | |
| "grad_norm": 0.04292105510830879, | |
| "learning_rate": 2.540063844649092e-05, | |
| "loss": 0.7288, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 1.5427910562837317, | |
| "grad_norm": 0.04228365048766136, | |
| "learning_rate": 2.5236902852051314e-05, | |
| "loss": 0.6499, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 1.5443330763299923, | |
| "grad_norm": 0.03898927569389343, | |
| "learning_rate": 2.5073620452978707e-05, | |
| "loss": 0.6452, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 1.5458750963762529, | |
| "grad_norm": 0.04187345877289772, | |
| "learning_rate": 2.4910792239050574e-05, | |
| "loss": 0.6877, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 1.5474171164225135, | |
| "grad_norm": 0.039659738540649414, | |
| "learning_rate": 2.4748419197291216e-05, | |
| "loss": 0.6453, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 1.548959136468774, | |
| "grad_norm": 0.03793085739016533, | |
| "learning_rate": 2.458650231196572e-05, | |
| "loss": 0.5859, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 1.5505011565150348, | |
| "grad_norm": 0.04028444364666939, | |
| "learning_rate": 2.4425042564574184e-05, | |
| "loss": 0.712, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 1.5520431765612952, | |
| "grad_norm": 0.04209078848361969, | |
| "learning_rate": 2.4264040933845533e-05, | |
| "loss": 0.6528, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 1.553585196607556, | |
| "grad_norm": 0.03534756973385811, | |
| "learning_rate": 2.4103498395731748e-05, | |
| "loss": 0.581, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 1.5551272166538164, | |
| "grad_norm": 0.03889233246445656, | |
| "learning_rate": 2.3943415923401925e-05, | |
| "loss": 0.7642, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 1.5566692367000772, | |
| "grad_norm": 0.03933433070778847, | |
| "learning_rate": 2.3783794487236365e-05, | |
| "loss": 0.6585, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 1.5582112567463375, | |
| "grad_norm": 0.037866294384002686, | |
| "learning_rate": 2.3624635054820633e-05, | |
| "loss": 0.6744, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 1.5597532767925983, | |
| "grad_norm": 0.04123243689537048, | |
| "learning_rate": 2.346593859093974e-05, | |
| "loss": 0.7595, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 1.561295296838859, | |
| "grad_norm": 0.04691806063055992, | |
| "learning_rate": 2.3307706057572355e-05, | |
| "loss": 0.7143, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 1.5628373168851195, | |
| "grad_norm": 0.04137217998504639, | |
| "learning_rate": 2.3149938413884886e-05, | |
| "loss": 0.6601, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 1.56437933693138, | |
| "grad_norm": 0.04006261005997658, | |
| "learning_rate": 2.299263661622566e-05, | |
| "loss": 0.6303, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 1.5659213569776407, | |
| "grad_norm": 0.03742034360766411, | |
| "learning_rate": 2.2835801618119246e-05, | |
| "loss": 0.6286, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 1.5674633770239013, | |
| "grad_norm": 0.03880961611866951, | |
| "learning_rate": 2.2679434370260454e-05, | |
| "loss": 0.6544, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 1.5690053970701618, | |
| "grad_norm": 0.03586026281118393, | |
| "learning_rate": 2.2523535820508847e-05, | |
| "loss": 0.5577, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 1.5705474171164227, | |
| "grad_norm": 0.041177500039339066, | |
| "learning_rate": 2.2368106913882813e-05, | |
| "loss": 0.7577, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 1.572089437162683, | |
| "grad_norm": 0.03928428143262863, | |
| "learning_rate": 2.2213148592553845e-05, | |
| "loss": 0.6413, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 1.5736314572089438, | |
| "grad_norm": 0.036161281168460846, | |
| "learning_rate": 2.205866179584084e-05, | |
| "loss": 0.6107, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 1.5751734772552042, | |
| "grad_norm": 0.03864715248346329, | |
| "learning_rate": 2.190464746020452e-05, | |
| "loss": 0.6334, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 1.576715497301465, | |
| "grad_norm": 0.03942199423909187, | |
| "learning_rate": 2.1751106519241648e-05, | |
| "loss": 0.6853, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 1.5782575173477256, | |
| "grad_norm": 0.040874917060136795, | |
| "learning_rate": 2.159803990367931e-05, | |
| "loss": 0.7133, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 1.5797995373939862, | |
| "grad_norm": 0.03600339964032173, | |
| "learning_rate": 2.1445448541369396e-05, | |
| "loss": 0.63, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 1.5813415574402467, | |
| "grad_norm": 0.035428039729595184, | |
| "learning_rate": 2.129333335728295e-05, | |
| "loss": 0.6536, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 1.5828835774865073, | |
| "grad_norm": 0.04092607647180557, | |
| "learning_rate": 2.1141695273504503e-05, | |
| "loss": 0.684, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 1.584425597532768, | |
| "grad_norm": 0.04719012603163719, | |
| "learning_rate": 2.0990535209226548e-05, | |
| "loss": 0.7787, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 1.5859676175790285, | |
| "grad_norm": 0.043005432933568954, | |
| "learning_rate": 2.083985408074396e-05, | |
| "loss": 0.7556, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 1.5875096376252893, | |
| "grad_norm": 0.03518006578087807, | |
| "learning_rate": 2.0689652801448366e-05, | |
| "loss": 0.6059, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 1.5890516576715497, | |
| "grad_norm": 0.0455465205013752, | |
| "learning_rate": 2.0539932281822682e-05, | |
| "loss": 0.7704, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 1.5905936777178105, | |
| "grad_norm": 0.04000212624669075, | |
| "learning_rate": 2.0390693429435627e-05, | |
| "loss": 0.6697, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 1.5921356977640708, | |
| "grad_norm": 0.03780398145318031, | |
| "learning_rate": 2.0241937148936142e-05, | |
| "loss": 0.5924, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 1.5936777178103316, | |
| "grad_norm": 0.036036282777786255, | |
| "learning_rate": 2.00936643420479e-05, | |
| "loss": 0.6255, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 1.595219737856592, | |
| "grad_norm": 0.03216379135847092, | |
| "learning_rate": 1.994587590756397e-05, | |
| "loss": 0.5796, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 1.5967617579028528, | |
| "grad_norm": 0.03556982800364494, | |
| "learning_rate": 1.979857274134115e-05, | |
| "loss": 0.4832, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 1.5983037779491134, | |
| "grad_norm": 0.03701284900307655, | |
| "learning_rate": 1.9651755736294784e-05, | |
| "loss": 0.6202, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 1.599845797995374, | |
| "grad_norm": 0.03629275783896446, | |
| "learning_rate": 1.9505425782393115e-05, | |
| "loss": 0.575, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 1.6013878180416345, | |
| "grad_norm": 0.03774282708764076, | |
| "learning_rate": 1.9359583766652133e-05, | |
| "loss": 0.656, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 1.6029298380878951, | |
| "grad_norm": 0.048092518001794815, | |
| "learning_rate": 1.9214230573129945e-05, | |
| "loss": 0.6851, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 1.6044718581341557, | |
| "grad_norm": 0.03574126586318016, | |
| "learning_rate": 1.906936708292154e-05, | |
| "loss": 0.606, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 1.6060138781804163, | |
| "grad_norm": 0.03510553762316704, | |
| "learning_rate": 1.892499417415362e-05, | |
| "loss": 0.6359, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 1.607555898226677, | |
| "grad_norm": 0.03518098220229149, | |
| "learning_rate": 1.87811127219789e-05, | |
| "loss": 0.5856, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 1.6090979182729375, | |
| "grad_norm": 0.03950320556759834, | |
| "learning_rate": 1.8637723598571078e-05, | |
| "loss": 0.6158, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 1.6106399383191983, | |
| "grad_norm": 0.04341904819011688, | |
| "learning_rate": 1.849482767311953e-05, | |
| "loss": 0.7325, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 1.6121819583654586, | |
| "grad_norm": 0.04106053337454796, | |
| "learning_rate": 1.835242581182389e-05, | |
| "loss": 0.6351, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 1.6137239784117194, | |
| "grad_norm": 0.048341959714889526, | |
| "learning_rate": 1.8210518877889016e-05, | |
| "loss": 0.8224, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 1.6152659984579798, | |
| "grad_norm": 0.03966255858540535, | |
| "learning_rate": 1.806910773151951e-05, | |
| "loss": 0.6657, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 1.6168080185042406, | |
| "grad_norm": 0.03485183045268059, | |
| "learning_rate": 1.7928193229914747e-05, | |
| "loss": 0.5796, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 1.6183500385505012, | |
| "grad_norm": 0.03906840831041336, | |
| "learning_rate": 1.7787776227263463e-05, | |
| "loss": 0.6774, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 1.6198920585967618, | |
| "grad_norm": 0.035891421139240265, | |
| "learning_rate": 1.7647857574738756e-05, | |
| "loss": 0.638, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 1.6214340786430224, | |
| "grad_norm": 0.041692256927490234, | |
| "learning_rate": 1.7508438120492867e-05, | |
| "loss": 0.6572, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 1.622976098689283, | |
| "grad_norm": 0.034707244485616684, | |
| "learning_rate": 1.736951870965192e-05, | |
| "loss": 0.5411, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 1.6245181187355435, | |
| "grad_norm": 0.034078020602464676, | |
| "learning_rate": 1.7231100184310956e-05, | |
| "loss": 0.5657, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 1.626060138781804, | |
| "grad_norm": 0.0405205674469471, | |
| "learning_rate": 1.7093183383528778e-05, | |
| "loss": 0.6971, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 1.627602158828065, | |
| "grad_norm": 0.04099133238196373, | |
| "learning_rate": 1.6955769143322897e-05, | |
| "loss": 0.6603, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 1.6291441788743253, | |
| "grad_norm": 0.04224637150764465, | |
| "learning_rate": 1.681885829666432e-05, | |
| "loss": 0.6891, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 1.630686198920586, | |
| "grad_norm": 0.040733352303504944, | |
| "learning_rate": 1.6682451673472733e-05, | |
| "loss": 0.7198, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 1.6322282189668464, | |
| "grad_norm": 0.03536638990044594, | |
| "learning_rate": 1.6546550100611236e-05, | |
| "loss": 0.5982, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 1.6337702390131073, | |
| "grad_norm": 0.03701018542051315, | |
| "learning_rate": 1.6411154401881477e-05, | |
| "loss": 0.6954, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 1.6353122590593676, | |
| "grad_norm": 0.03783821314573288, | |
| "learning_rate": 1.6276265398018644e-05, | |
| "loss": 0.6432, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 1.6368542791056284, | |
| "grad_norm": 0.03709625452756882, | |
| "learning_rate": 1.6141883906686485e-05, | |
| "loss": 0.5601, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 1.638396299151889, | |
| "grad_norm": 0.04273844510316849, | |
| "learning_rate": 1.6008010742472257e-05, | |
| "loss": 0.6104, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 1.6399383191981496, | |
| "grad_norm": 0.037857066839933395, | |
| "learning_rate": 1.587464671688187e-05, | |
| "loss": 0.7139, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 1.6414803392444102, | |
| "grad_norm": 0.04359933361411095, | |
| "learning_rate": 1.5741792638335095e-05, | |
| "loss": 0.6415, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 1.6430223592906708, | |
| "grad_norm": 0.03557367995381355, | |
| "learning_rate": 1.5609449312160363e-05, | |
| "loss": 0.6213, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 1.6445643793369313, | |
| "grad_norm": 0.03385618329048157, | |
| "learning_rate": 1.5477617540590127e-05, | |
| "loss": 0.6414, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 1.646106399383192, | |
| "grad_norm": 0.0386623814702034, | |
| "learning_rate": 1.534629812275593e-05, | |
| "loss": 0.6734, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 1.6476484194294527, | |
| "grad_norm": 0.03584364429116249, | |
| "learning_rate": 1.5215491854683527e-05, | |
| "loss": 0.6501, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 1.649190439475713, | |
| "grad_norm": 0.045230668038129807, | |
| "learning_rate": 1.5085199529288097e-05, | |
| "loss": 0.7343, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 1.650732459521974, | |
| "grad_norm": 0.040120597928762436, | |
| "learning_rate": 1.4955421936369451e-05, | |
| "loss": 0.6288, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 1.6522744795682343, | |
| "grad_norm": 0.03697339817881584, | |
| "learning_rate": 1.4826159862607181e-05, | |
| "loss": 0.639, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 1.653816499614495, | |
| "grad_norm": 0.03529459238052368, | |
| "learning_rate": 1.4697414091555916e-05, | |
| "loss": 0.5891, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 1.6553585196607556, | |
| "grad_norm": 0.03791181743144989, | |
| "learning_rate": 1.456918540364065e-05, | |
| "loss": 0.655, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 1.6569005397070162, | |
| "grad_norm": 0.03758023679256439, | |
| "learning_rate": 1.4441474576151915e-05, | |
| "loss": 0.7188, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 1.6584425597532768, | |
| "grad_norm": 0.04228278622031212, | |
| "learning_rate": 1.4314282383241096e-05, | |
| "loss": 0.6365, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 1.6599845797995374, | |
| "grad_norm": 0.04156757891178131, | |
| "learning_rate": 1.4187609595915696e-05, | |
| "loss": 0.6761, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 1.661526599845798, | |
| "grad_norm": 0.036363374441862106, | |
| "learning_rate": 1.4061456982034816e-05, | |
| "loss": 0.5792, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 1.6630686198920586, | |
| "grad_norm": 0.03510697931051254, | |
| "learning_rate": 1.3935825306304329e-05, | |
| "loss": 0.6059, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 1.6646106399383191, | |
| "grad_norm": 0.03260960057377815, | |
| "learning_rate": 1.3810715330272284e-05, | |
| "loss": 0.5494, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 1.6661526599845797, | |
| "grad_norm": 0.035467296838760376, | |
| "learning_rate": 1.3686127812324402e-05, | |
| "loss": 0.5637, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 1.6676946800308405, | |
| "grad_norm": 0.03707287460565567, | |
| "learning_rate": 1.3562063507679323e-05, | |
| "loss": 0.5785, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 1.669236700077101, | |
| "grad_norm": 0.04190809279680252, | |
| "learning_rate": 1.3438523168384076e-05, | |
| "loss": 0.6882, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 1.6707787201233617, | |
| "grad_norm": 0.03608112037181854, | |
| "learning_rate": 1.3315507543309625e-05, | |
| "loss": 0.5698, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 1.672320740169622, | |
| "grad_norm": 0.0408920980989933, | |
| "learning_rate": 1.3193017378146211e-05, | |
| "loss": 0.7007, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 1.6738627602158829, | |
| "grad_norm": 0.03833528980612755, | |
| "learning_rate": 1.3071053415398815e-05, | |
| "loss": 0.6408, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 1.6754047802621435, | |
| "grad_norm": 0.03740300238132477, | |
| "learning_rate": 1.2949616394382802e-05, | |
| "loss": 0.6135, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 1.676946800308404, | |
| "grad_norm": 0.04070223122835159, | |
| "learning_rate": 1.2828707051219258e-05, | |
| "loss": 0.6955, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 1.6784888203546646, | |
| "grad_norm": 0.035887692123651505, | |
| "learning_rate": 1.2708326118830704e-05, | |
| "loss": 0.6414, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 1.6800308404009252, | |
| "grad_norm": 0.044555775821208954, | |
| "learning_rate": 1.2588474326936461e-05, | |
| "loss": 0.7052, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 1.6815728604471858, | |
| "grad_norm": 0.03866827115416527, | |
| "learning_rate": 1.2469152402048445e-05, | |
| "loss": 0.6768, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 1.6831148804934464, | |
| "grad_norm": 0.03495106101036072, | |
| "learning_rate": 1.2350361067466553e-05, | |
| "loss": 0.56, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 1.6846569005397072, | |
| "grad_norm": 0.03863424435257912, | |
| "learning_rate": 1.2232101043274436e-05, | |
| "loss": 0.6796, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 1.6861989205859675, | |
| "grad_norm": 0.04033857583999634, | |
| "learning_rate": 1.2114373046335059e-05, | |
| "loss": 0.6627, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 1.6877409406322283, | |
| "grad_norm": 0.04083244130015373, | |
| "learning_rate": 1.1997177790286363e-05, | |
| "loss": 0.5977, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 1.6892829606784887, | |
| "grad_norm": 0.03802114725112915, | |
| "learning_rate": 1.188051598553691e-05, | |
| "loss": 0.6665, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 1.6908249807247495, | |
| "grad_norm": 0.037241131067276, | |
| "learning_rate": 1.1764388339261689e-05, | |
| "loss": 0.5922, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 1.6923670007710099, | |
| "grad_norm": 0.03736341372132301, | |
| "learning_rate": 1.1648795555397719e-05, | |
| "loss": 0.5755, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 1.6939090208172707, | |
| "grad_norm": 0.04216596111655235, | |
| "learning_rate": 1.1533738334639788e-05, | |
| "loss": 0.6045, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 1.6954510408635313, | |
| "grad_norm": 0.04053506255149841, | |
| "learning_rate": 1.1419217374436231e-05, | |
| "loss": 0.6839, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 1.6969930609097919, | |
| "grad_norm": 0.03894759714603424, | |
| "learning_rate": 1.130523336898479e-05, | |
| "loss": 0.7398, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 1.6985350809560524, | |
| "grad_norm": 0.03372124955058098, | |
| "learning_rate": 1.1191787009228195e-05, | |
| "loss": 0.5823, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 1.700077101002313, | |
| "grad_norm": 0.03881566971540451, | |
| "learning_rate": 1.1078878982850194e-05, | |
| "loss": 0.6236, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 1.7016191210485736, | |
| "grad_norm": 0.03363576531410217, | |
| "learning_rate": 1.0966509974271288e-05, | |
| "loss": 0.5388, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 1.7031611410948342, | |
| "grad_norm": 0.03593594208359718, | |
| "learning_rate": 1.0854680664644534e-05, | |
| "loss": 0.6797, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 1.704703161141095, | |
| "grad_norm": 0.03860761970281601, | |
| "learning_rate": 1.074339173185147e-05, | |
| "loss": 0.6128, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 1.7062451811873554, | |
| "grad_norm": 0.0354536809027195, | |
| "learning_rate": 1.0632643850498047e-05, | |
| "loss": 0.6126, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 1.7077872012336162, | |
| "grad_norm": 0.03886459395289421, | |
| "learning_rate": 1.0522437691910492e-05, | |
| "loss": 0.6989, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 1.7093292212798765, | |
| "grad_norm": 0.03878238424658775, | |
| "learning_rate": 1.0412773924131203e-05, | |
| "loss": 0.6674, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 1.7108712413261373, | |
| "grad_norm": 0.04125213623046875, | |
| "learning_rate": 1.0303653211914788e-05, | |
| "loss": 0.6411, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 1.7124132613723977, | |
| "grad_norm": 0.03696538135409355, | |
| "learning_rate": 1.0195076216723931e-05, | |
| "loss": 0.6529, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 1.7139552814186585, | |
| "grad_norm": 0.035270582884550095, | |
| "learning_rate": 1.008704359672551e-05, | |
| "loss": 0.5699, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 1.715497301464919, | |
| "grad_norm": 0.03929029777646065, | |
| "learning_rate": 9.97955600678644e-06, | |
| "loss": 0.6989, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 1.7170393215111797, | |
| "grad_norm": 0.03818931430578232, | |
| "learning_rate": 9.872614098469912e-06, | |
| "loss": 0.6549, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 1.7185813415574402, | |
| "grad_norm": 0.040121689438819885, | |
| "learning_rate": 9.766218520031234e-06, | |
| "loss": 0.7327, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 1.7201233616037008, | |
| "grad_norm": 0.03708706423640251, | |
| "learning_rate": 9.660369916414014e-06, | |
| "loss": 0.6319, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 1.7216653816499614, | |
| "grad_norm": 0.04111948236823082, | |
| "learning_rate": 9.555068929246324e-06, | |
| "loss": 0.7247, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 1.723207401696222, | |
| "grad_norm": 0.03746362403035164, | |
| "learning_rate": 9.450316196836617e-06, | |
| "loss": 0.6445, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 1.7247494217424828, | |
| "grad_norm": 0.03804723545908928, | |
| "learning_rate": 9.346112354169977e-06, | |
| "loss": 0.6286, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 1.7262914417887432, | |
| "grad_norm": 0.03909954056143761, | |
| "learning_rate": 9.242458032904311e-06, | |
| "loss": 0.6533, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 1.727833461835004, | |
| "grad_norm": 0.039761465042829514, | |
| "learning_rate": 9.139353861366384e-06, | |
| "loss": 0.6055, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 1.7293754818812643, | |
| "grad_norm": 0.034162942320108414, | |
| "learning_rate": 9.036800464548157e-06, | |
| "loss": 0.5477, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 1.7309175019275251, | |
| "grad_norm": 0.041952334344387054, | |
| "learning_rate": 8.934798464102922e-06, | |
| "loss": 0.6654, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 1.7324595219737855, | |
| "grad_norm": 0.039020415395498276, | |
| "learning_rate": 8.833348478341518e-06, | |
| "loss": 0.697, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 1.7340015420200463, | |
| "grad_norm": 0.03665582835674286, | |
| "learning_rate": 8.732451122228592e-06, | |
| "loss": 0.6294, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 1.7355435620663069, | |
| "grad_norm": 0.03791512921452522, | |
| "learning_rate": 8.632107007378932e-06, | |
| "loss": 0.6001, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 1.7370855821125675, | |
| "grad_norm": 0.04250357300043106, | |
| "learning_rate": 8.532316742053715e-06, | |
| "loss": 0.6997, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 1.738627602158828, | |
| "grad_norm": 0.03282831981778145, | |
| "learning_rate": 8.433080931156767e-06, | |
| "loss": 0.5166, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 1.7401696222050886, | |
| "grad_norm": 0.03755633533000946, | |
| "learning_rate": 8.334400176230982e-06, | |
| "loss": 0.5834, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 1.7417116422513492, | |
| "grad_norm": 0.0407869890332222, | |
| "learning_rate": 8.236275075454646e-06, | |
| "loss": 0.6973, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 1.7432536622976098, | |
| "grad_norm": 0.0354437381029129, | |
| "learning_rate": 8.138706223637827e-06, | |
| "loss": 0.5726, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 1.7447956823438706, | |
| "grad_norm": 0.04205944389104843, | |
| "learning_rate": 8.041694212218697e-06, | |
| "loss": 0.6622, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 1.746337702390131, | |
| "grad_norm": 0.043227419257164, | |
| "learning_rate": 7.945239629260038e-06, | |
| "loss": 0.6499, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 1.7478797224363918, | |
| "grad_norm": 0.03981008008122444, | |
| "learning_rate": 7.849343059445635e-06, | |
| "loss": 0.6556, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 1.7494217424826521, | |
| "grad_norm": 0.03791451081633568, | |
| "learning_rate": 7.75400508407671e-06, | |
| "loss": 0.5629, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 1.750963762528913, | |
| "grad_norm": 0.036014899611473083, | |
| "learning_rate": 7.659226281068443e-06, | |
| "loss": 0.5544, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 1.7525057825751735, | |
| "grad_norm": 0.04287489503622055, | |
| "learning_rate": 7.565007224946485e-06, | |
| "loss": 0.6791, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 1.7540478026214341, | |
| "grad_norm": 0.042246993631124496, | |
| "learning_rate": 7.471348486843355e-06, | |
| "loss": 0.7119, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 1.7555898226676947, | |
| "grad_norm": 0.03575586527585983, | |
| "learning_rate": 7.378250634495143e-06, | |
| "loss": 0.5514, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 1.7571318427139553, | |
| "grad_norm": 0.040197569876909256, | |
| "learning_rate": 7.2857142322379726e-06, | |
| "loss": 0.6751, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 1.7586738627602159, | |
| "grad_norm": 0.036775026470422745, | |
| "learning_rate": 7.193739841004565e-06, | |
| "loss": 0.6665, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 1.7602158828064765, | |
| "grad_norm": 0.038588572293519974, | |
| "learning_rate": 7.102328018320858e-06, | |
| "loss": 0.6152, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 1.761757902852737, | |
| "grad_norm": 0.04226002097129822, | |
| "learning_rate": 7.011479318302716e-06, | |
| "loss": 0.6287, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 1.7632999228989976, | |
| "grad_norm": 0.03871115297079086, | |
| "learning_rate": 6.9211942916524e-06, | |
| "loss": 0.6324, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 1.7648419429452584, | |
| "grad_norm": 0.04019390419125557, | |
| "learning_rate": 6.8314734856553934e-06, | |
| "loss": 0.6497, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 1.7663839629915188, | |
| "grad_norm": 0.03871925547719002, | |
| "learning_rate": 6.7423174441769934e-06, | |
| "loss": 0.6618, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 1.7679259830377796, | |
| "grad_norm": 0.03633447363972664, | |
| "learning_rate": 6.653726707659014e-06, | |
| "loss": 0.5637, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 1.76946800308404, | |
| "grad_norm": 0.03641396760940552, | |
| "learning_rate": 6.565701813116543e-06, | |
| "loss": 0.5066, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 1.7710100231303008, | |
| "grad_norm": 0.039006076753139496, | |
| "learning_rate": 6.478243294134678e-06, | |
| "loss": 0.6744, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 1.7725520431765613, | |
| "grad_norm": 0.038571685552597046, | |
| "learning_rate": 6.39135168086531e-06, | |
| "loss": 0.6369, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 1.774094063222822, | |
| "grad_norm": 0.043034303933382034, | |
| "learning_rate": 6.3050275000238414e-06, | |
| "loss": 0.6785, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 1.7756360832690825, | |
| "grad_norm": 0.04005983844399452, | |
| "learning_rate": 6.219271274886051e-06, | |
| "loss": 0.6623, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 1.777178103315343, | |
| "grad_norm": 0.03346388041973114, | |
| "learning_rate": 6.13408352528495e-06, | |
| "loss": 0.553, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 1.7787201233616037, | |
| "grad_norm": 0.04227742180228233, | |
| "learning_rate": 6.04946476760756e-06, | |
| "loss": 0.7123, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 1.7802621434078643, | |
| "grad_norm": 0.035061586648225784, | |
| "learning_rate": 5.965415514791816e-06, | |
| "loss": 0.6322, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 1.781804163454125, | |
| "grad_norm": 0.03867385908961296, | |
| "learning_rate": 5.8819362763234635e-06, | |
| "loss": 0.6281, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 1.7833461835003854, | |
| "grad_norm": 0.04091177508234978, | |
| "learning_rate": 5.79902755823295e-06, | |
| "loss": 0.6556, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 1.7848882035466462, | |
| "grad_norm": 0.03809580206871033, | |
| "learning_rate": 5.716689863092361e-06, | |
| "loss": 0.4807, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 1.7864302235929066, | |
| "grad_norm": 0.04020286351442337, | |
| "learning_rate": 5.63492369001245e-06, | |
| "loss": 0.5919, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 1.7879722436391674, | |
| "grad_norm": 0.03693411499261856, | |
| "learning_rate": 5.553729534639462e-06, | |
| "loss": 0.5985, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 1.7895142636854278, | |
| "grad_norm": 0.040632445365190506, | |
| "learning_rate": 5.47310788915224e-06, | |
| "loss": 0.6933, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 1.7910562837316886, | |
| "grad_norm": 0.04121388867497444, | |
| "learning_rate": 5.3930592422592355e-06, | |
| "loss": 0.5592, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 1.7925983037779492, | |
| "grad_norm": 0.03586176037788391, | |
| "learning_rate": 5.313584079195488e-06, | |
| "loss": 0.5659, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 1.7941403238242097, | |
| "grad_norm": 0.03481077030301094, | |
| "learning_rate": 5.2346828817197655e-06, | |
| "loss": 0.6274, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 1.7956823438704703, | |
| "grad_norm": 0.040142469108104706, | |
| "learning_rate": 5.15635612811155e-06, | |
| "loss": 0.6304, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 1.797224363916731, | |
| "grad_norm": 0.04349083453416824, | |
| "learning_rate": 5.078604293168232e-06, | |
| "loss": 0.6796, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 1.7987663839629915, | |
| "grad_norm": 0.040206313133239746, | |
| "learning_rate": 5.001427848202145e-06, | |
| "loss": 0.7447, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 1.800308404009252, | |
| "grad_norm": 0.03913164883852005, | |
| "learning_rate": 4.924827261037779e-06, | |
| "loss": 0.645, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 1.8018504240555129, | |
| "grad_norm": 0.03923012688755989, | |
| "learning_rate": 4.848802996008927e-06, | |
| "loss": 0.578, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 1.8033924441017732, | |
| "grad_norm": 0.04153226315975189, | |
| "learning_rate": 4.773355513955846e-06, | |
| "loss": 0.6151, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 1.804934464148034, | |
| "grad_norm": 0.03950130194425583, | |
| "learning_rate": 4.698485272222431e-06, | |
| "loss": 0.6094, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 1.8064764841942944, | |
| "grad_norm": 0.038277603685855865, | |
| "learning_rate": 4.624192724653565e-06, | |
| "loss": 0.6467, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 1.8080185042405552, | |
| "grad_norm": 0.03700839728116989, | |
| "learning_rate": 4.550478321592277e-06, | |
| "loss": 0.6233, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 1.8095605242868156, | |
| "grad_norm": 0.039141424000263214, | |
| "learning_rate": 4.47734250987697e-06, | |
| "loss": 0.6585, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 1.8111025443330764, | |
| "grad_norm": 0.0396508127450943, | |
| "learning_rate": 4.404785732838846e-06, | |
| "loss": 0.6203, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 1.812644564379337, | |
| "grad_norm": 0.03907623142004013, | |
| "learning_rate": 4.3328084302990845e-06, | |
| "loss": 0.6842, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 1.8141865844255975, | |
| "grad_norm": 0.0373641662299633, | |
| "learning_rate": 4.261411038566254e-06, | |
| "loss": 0.6387, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 1.8157286044718581, | |
| "grad_norm": 0.03770943358540535, | |
| "learning_rate": 4.190593990433655e-06, | |
| "loss": 0.632, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 1.8172706245181187, | |
| "grad_norm": 0.04551894962787628, | |
| "learning_rate": 4.120357715176703e-06, | |
| "loss": 0.5912, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 1.8188126445643793, | |
| "grad_norm": 0.03960162401199341, | |
| "learning_rate": 4.050702638550275e-06, | |
| "loss": 0.6788, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 1.8203546646106399, | |
| "grad_norm": 0.04148714616894722, | |
| "learning_rate": 3.981629182786162e-06, | |
| "loss": 0.6767, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 1.8218966846569007, | |
| "grad_norm": 0.03838365152478218, | |
| "learning_rate": 3.913137766590569e-06, | |
| "loss": 0.5993, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 1.823438704703161, | |
| "grad_norm": 0.03973490744829178, | |
| "learning_rate": 3.845228805141477e-06, | |
| "loss": 0.6298, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 1.8249807247494219, | |
| "grad_norm": 0.0381377637386322, | |
| "learning_rate": 3.7779027100861786e-06, | |
| "loss": 0.6554, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 1.8265227447956822, | |
| "grad_norm": 0.03837570175528526, | |
| "learning_rate": 3.7111598895387733e-06, | |
| "loss": 0.6738, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 1.828064764841943, | |
| "grad_norm": 0.0379825234413147, | |
| "learning_rate": 3.6450007480777093e-06, | |
| "loss": 0.5949, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 1.8296067848882034, | |
| "grad_norm": 0.03953836113214493, | |
| "learning_rate": 3.5794256867432983e-06, | |
| "loss": 0.6, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 1.8311488049344642, | |
| "grad_norm": 0.04130083695054054, | |
| "learning_rate": 3.5144351030353074e-06, | |
| "loss": 0.6276, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 1.8326908249807248, | |
| "grad_norm": 0.04915314167737961, | |
| "learning_rate": 3.45002939091057e-06, | |
| "loss": 0.7502, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 1.8342328450269854, | |
| "grad_norm": 0.04003705456852913, | |
| "learning_rate": 3.386208940780522e-06, | |
| "loss": 0.5908, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 1.835774865073246, | |
| "grad_norm": 0.038307636976242065, | |
| "learning_rate": 3.3229741395089276e-06, | |
| "loss": 0.6623, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 1.8373168851195065, | |
| "grad_norm": 0.04828540235757828, | |
| "learning_rate": 3.2603253704095005e-06, | |
| "loss": 0.7242, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 1.838858905165767, | |
| "grad_norm": 0.0392216332256794, | |
| "learning_rate": 3.19826301324353e-06, | |
| "loss": 0.6806, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 1.8404009252120277, | |
| "grad_norm": 0.04001534730195999, | |
| "learning_rate": 3.136787444217648e-06, | |
| "loss": 0.6126, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 1.8419429452582885, | |
| "grad_norm": 0.0386841855943203, | |
| "learning_rate": 3.0758990359815334e-06, | |
| "loss": 0.6587, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 1.8434849653045489, | |
| "grad_norm": 0.03681337460875511, | |
| "learning_rate": 3.0155981576255986e-06, | |
| "loss": 0.6253, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 1.8450269853508097, | |
| "grad_norm": 0.04005776345729828, | |
| "learning_rate": 2.9558851746788517e-06, | |
| "loss": 0.7222, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 1.84656900539707, | |
| "grad_norm": 0.03951365500688553, | |
| "learning_rate": 2.896760449106606e-06, | |
| "loss": 0.6164, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 1.8481110254433308, | |
| "grad_norm": 0.037957701832056046, | |
| "learning_rate": 2.8382243393082595e-06, | |
| "loss": 0.6693, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 1.8496530454895914, | |
| "grad_norm": 0.03840763866901398, | |
| "learning_rate": 2.780277200115222e-06, | |
| "loss": 0.6934, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 1.851195065535852, | |
| "grad_norm": 0.04111471772193909, | |
| "learning_rate": 2.722919382788691e-06, | |
| "loss": 0.6709, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 1.8527370855821126, | |
| "grad_norm": 0.040044333785772324, | |
| "learning_rate": 2.6661512350175554e-06, | |
| "loss": 0.5842, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 1.8542791056283732, | |
| "grad_norm": 0.033889323472976685, | |
| "learning_rate": 2.609973100916241e-06, | |
| "loss": 0.5468, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 1.8558211256746338, | |
| "grad_norm": 0.040531665086746216, | |
| "learning_rate": 2.5543853210226565e-06, | |
| "loss": 0.6935, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 1.8573631457208943, | |
| "grad_norm": 0.0395374521613121, | |
| "learning_rate": 2.4993882322961738e-06, | |
| "loss": 0.6416, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 1.858905165767155, | |
| "grad_norm": 0.03926708549261093, | |
| "learning_rate": 2.444982168115528e-06, | |
| "loss": 0.6378, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 1.8604471858134155, | |
| "grad_norm": 0.03875267133116722, | |
| "learning_rate": 2.3911674582767553e-06, | |
| "loss": 0.6103, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 1.8619892058596763, | |
| "grad_norm": 0.03870154917240143, | |
| "learning_rate": 2.3379444289913342e-06, | |
| "loss": 0.614, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 1.8635312259059367, | |
| "grad_norm": 0.040346428751945496, | |
| "learning_rate": 2.2853134028840594e-06, | |
| "loss": 0.6663, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 1.8650732459521975, | |
| "grad_norm": 0.04254676401615143, | |
| "learning_rate": 2.233274698991139e-06, | |
| "loss": 0.6521, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 1.8666152659984578, | |
| "grad_norm": 0.03783309459686279, | |
| "learning_rate": 2.181828632758354e-06, | |
| "loss": 0.6483, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 1.8681572860447186, | |
| "grad_norm": 0.03636375069618225, | |
| "learning_rate": 2.1309755160389797e-06, | |
| "loss": 0.5717, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 1.8696993060909792, | |
| "grad_norm": 0.03707589954137802, | |
| "learning_rate": 2.080715657092003e-06, | |
| "loss": 0.6379, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 1.8712413261372398, | |
| "grad_norm": 0.04367575794458389, | |
| "learning_rate": 2.0310493605802396e-06, | |
| "loss": 0.6827, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 1.8727833461835004, | |
| "grad_norm": 0.045448847115039825, | |
| "learning_rate": 1.981976927568474e-06, | |
| "loss": 0.7602, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 1.874325366229761, | |
| "grad_norm": 0.040490083396434784, | |
| "learning_rate": 1.9334986555216373e-06, | |
| "loss": 0.6258, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 1.8758673862760216, | |
| "grad_norm": 0.03627939894795418, | |
| "learning_rate": 1.8856148383029848e-06, | |
| "loss": 0.6405, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 1.8774094063222821, | |
| "grad_norm": 0.03700585290789604, | |
| "learning_rate": 1.8383257661723662e-06, | |
| "loss": 0.6563, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 1.878951426368543, | |
| "grad_norm": 0.036659762263298035, | |
| "learning_rate": 1.7916317257844039e-06, | |
| "loss": 0.6966, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 1.8804934464148033, | |
| "grad_norm": 0.037120621651411057, | |
| "learning_rate": 1.7455330001868054e-06, | |
| "loss": 0.6147, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 1.8820354664610641, | |
| "grad_norm": 0.0358969122171402, | |
| "learning_rate": 1.700029868818631e-06, | |
| "loss": 0.6476, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 1.8835774865073245, | |
| "grad_norm": 0.04138852283358574, | |
| "learning_rate": 1.6551226075085747e-06, | |
| "loss": 0.6519, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 1.8851195065535853, | |
| "grad_norm": 0.034950390458106995, | |
| "learning_rate": 1.6108114884733183e-06, | |
| "loss": 0.5798, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 1.8866615265998457, | |
| "grad_norm": 0.05633686110377312, | |
| "learning_rate": 1.567096780315891e-06, | |
| "loss": 0.6164, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 1.8882035466461065, | |
| "grad_norm": 0.037615418434143066, | |
| "learning_rate": 1.5239787480240353e-06, | |
| "loss": 0.6013, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 1.889745566692367, | |
| "grad_norm": 0.039463263005018234, | |
| "learning_rate": 1.4814576529685543e-06, | |
| "loss": 0.6442, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 1.8912875867386276, | |
| "grad_norm": 0.04071315377950668, | |
| "learning_rate": 1.4395337529018116e-06, | |
| "loss": 0.6351, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 1.8928296067848882, | |
| "grad_norm": 0.03899841383099556, | |
| "learning_rate": 1.3982073019560782e-06, | |
| "loss": 0.6408, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 1.8943716268311488, | |
| "grad_norm": 0.03981661796569824, | |
| "learning_rate": 1.3574785506420773e-06, | |
| "loss": 0.5814, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 1.8959136468774094, | |
| "grad_norm": 0.039119165390729904, | |
| "learning_rate": 1.317347745847386e-06, | |
| "loss": 0.5716, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 1.89745566692367, | |
| "grad_norm": 0.03807598352432251, | |
| "learning_rate": 1.2778151308350139e-06, | |
| "loss": 0.6134, | |
| "step": 1231 | |
| }, | |
| { | |
| "epoch": 1.8989976869699308, | |
| "grad_norm": 0.03812390938401222, | |
| "learning_rate": 1.2388809452418714e-06, | |
| "loss": 0.641, | |
| "step": 1232 | |
| }, | |
| { | |
| "epoch": 1.9005397070161911, | |
| "grad_norm": 0.03916562348604202, | |
| "learning_rate": 1.2005454250773262e-06, | |
| "loss": 0.6881, | |
| "step": 1233 | |
| }, | |
| { | |
| "epoch": 1.902081727062452, | |
| "grad_norm": 0.03635266423225403, | |
| "learning_rate": 1.1628088027218265e-06, | |
| "loss": 0.6259, | |
| "step": 1234 | |
| }, | |
| { | |
| "epoch": 1.9036237471087123, | |
| "grad_norm": 0.03907233849167824, | |
| "learning_rate": 1.1256713069254243e-06, | |
| "loss": 0.6105, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 1.905165767154973, | |
| "grad_norm": 0.038362421095371246, | |
| "learning_rate": 1.0891331628063884e-06, | |
| "loss": 0.6606, | |
| "step": 1236 | |
| }, | |
| { | |
| "epoch": 1.9067077872012335, | |
| "grad_norm": 0.040085967630147934, | |
| "learning_rate": 1.0531945918499265e-06, | |
| "loss": 0.7064, | |
| "step": 1237 | |
| }, | |
| { | |
| "epoch": 1.9082498072474943, | |
| "grad_norm": 0.03829773887991905, | |
| "learning_rate": 1.0178558119067315e-06, | |
| "loss": 0.6459, | |
| "step": 1238 | |
| }, | |
| { | |
| "epoch": 1.9097918272937549, | |
| "grad_norm": 0.04176430404186249, | |
| "learning_rate": 9.831170371917276e-07, | |
| "loss": 0.6679, | |
| "step": 1239 | |
| }, | |
| { | |
| "epoch": 1.9113338473400154, | |
| "grad_norm": 0.040741175413131714, | |
| "learning_rate": 9.489784782827582e-07, | |
| "loss": 0.7462, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 1.912875867386276, | |
| "grad_norm": 0.04019416868686676, | |
| "learning_rate": 9.154403421193225e-07, | |
| "loss": 0.6575, | |
| "step": 1241 | |
| }, | |
| { | |
| "epoch": 1.9144178874325366, | |
| "grad_norm": 0.04063234105706215, | |
| "learning_rate": 8.825028320012751e-07, | |
| "loss": 0.7271, | |
| "step": 1242 | |
| }, | |
| { | |
| "epoch": 1.9159599074787972, | |
| "grad_norm": 0.03603971749544144, | |
| "learning_rate": 8.501661475876499e-07, | |
| "loss": 0.6717, | |
| "step": 1243 | |
| }, | |
| { | |
| "epoch": 1.9175019275250578, | |
| "grad_norm": 0.040380485355854034, | |
| "learning_rate": 8.18430484895405e-07, | |
| "loss": 0.6219, | |
| "step": 1244 | |
| }, | |
| { | |
| "epoch": 1.9190439475713186, | |
| "grad_norm": 0.036242734640836716, | |
| "learning_rate": 7.872960362982684e-07, | |
| "loss": 0.6054, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 1.920585967617579, | |
| "grad_norm": 0.042109616100788116, | |
| "learning_rate": 7.567629905255502e-07, | |
| "loss": 0.6753, | |
| "step": 1246 | |
| }, | |
| { | |
| "epoch": 1.9221279876638397, | |
| "grad_norm": 0.03933633863925934, | |
| "learning_rate": 7.268315326609987e-07, | |
| "loss": 0.6543, | |
| "step": 1247 | |
| }, | |
| { | |
| "epoch": 1.9236700077101, | |
| "grad_norm": 0.03989708423614502, | |
| "learning_rate": 6.975018441417125e-07, | |
| "loss": 0.7069, | |
| "step": 1248 | |
| }, | |
| { | |
| "epoch": 1.925212027756361, | |
| "grad_norm": 0.04395151510834694, | |
| "learning_rate": 6.68774102756975e-07, | |
| "loss": 0.6771, | |
| "step": 1249 | |
| }, | |
| { | |
| "epoch": 1.9267540478026213, | |
| "grad_norm": 0.03848705068230629, | |
| "learning_rate": 6.406484826472548e-07, | |
| "loss": 0.6207, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 1.928296067848882, | |
| "grad_norm": 0.043200716376304626, | |
| "learning_rate": 6.131251543030847e-07, | |
| "loss": 0.6905, | |
| "step": 1251 | |
| }, | |
| { | |
| "epoch": 1.9298380878951427, | |
| "grad_norm": 0.04006700590252876, | |
| "learning_rate": 5.862042845640403e-07, | |
| "loss": 0.6206, | |
| "step": 1252 | |
| }, | |
| { | |
| "epoch": 1.9313801079414032, | |
| "grad_norm": 0.0446675606071949, | |
| "learning_rate": 5.59886036617785e-07, | |
| "loss": 0.6537, | |
| "step": 1253 | |
| }, | |
| { | |
| "epoch": 1.9329221279876638, | |
| "grad_norm": 0.046989332884550095, | |
| "learning_rate": 5.341705699990152e-07, | |
| "loss": 0.7148, | |
| "step": 1254 | |
| }, | |
| { | |
| "epoch": 1.9344641480339244, | |
| "grad_norm": 0.03648298978805542, | |
| "learning_rate": 5.09058040588506e-07, | |
| "loss": 0.5786, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 1.936006168080185, | |
| "grad_norm": 0.040052320808172226, | |
| "learning_rate": 4.845486006121891e-07, | |
| "loss": 0.5819, | |
| "step": 1256 | |
| }, | |
| { | |
| "epoch": 1.9375481881264456, | |
| "grad_norm": 0.040988989174366, | |
| "learning_rate": 4.6064239864020973e-07, | |
| "loss": 0.6202, | |
| "step": 1257 | |
| }, | |
| { | |
| "epoch": 1.9390902081727064, | |
| "grad_norm": 0.037767112255096436, | |
| "learning_rate": 4.373395795860713e-07, | |
| "loss": 0.6545, | |
| "step": 1258 | |
| }, | |
| { | |
| "epoch": 1.9406322282189667, | |
| "grad_norm": 0.03612399846315384, | |
| "learning_rate": 4.146402847056474e-07, | |
| "loss": 0.6043, | |
| "step": 1259 | |
| }, | |
| { | |
| "epoch": 1.9421742482652276, | |
| "grad_norm": 0.03655252233147621, | |
| "learning_rate": 3.9254465159646036e-07, | |
| "loss": 0.5824, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 1.943716268311488, | |
| "grad_norm": 0.03881845995783806, | |
| "learning_rate": 3.7105281419675953e-07, | |
| "loss": 0.6429, | |
| "step": 1261 | |
| }, | |
| { | |
| "epoch": 1.9452582883577487, | |
| "grad_norm": 0.04010866582393646, | |
| "learning_rate": 3.5016490278473315e-07, | |
| "loss": 0.6494, | |
| "step": 1262 | |
| }, | |
| { | |
| "epoch": 1.9468003084040093, | |
| "grad_norm": 0.04136018455028534, | |
| "learning_rate": 3.298810439777311e-07, | |
| "loss": 0.6447, | |
| "step": 1263 | |
| }, | |
| { | |
| "epoch": 1.94834232845027, | |
| "grad_norm": 0.036649253219366074, | |
| "learning_rate": 3.1020136073146575e-07, | |
| "loss": 0.6078, | |
| "step": 1264 | |
| }, | |
| { | |
| "epoch": 1.9498843484965305, | |
| "grad_norm": 0.042641252279281616, | |
| "learning_rate": 2.911259723393123e-07, | |
| "loss": 0.7147, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 1.951426368542791, | |
| "grad_norm": 0.039730802178382874, | |
| "learning_rate": 2.7265499443154265e-07, | |
| "loss": 0.6759, | |
| "step": 1266 | |
| }, | |
| { | |
| "epoch": 1.9529683885890516, | |
| "grad_norm": 0.03965951129794121, | |
| "learning_rate": 2.547885389746485e-07, | |
| "loss": 0.6032, | |
| "step": 1267 | |
| }, | |
| { | |
| "epoch": 1.9545104086353122, | |
| "grad_norm": 0.03704364597797394, | |
| "learning_rate": 2.3752671427065276e-07, | |
| "loss": 0.5765, | |
| "step": 1268 | |
| }, | |
| { | |
| "epoch": 1.9560524286815728, | |
| "grad_norm": 0.036405693739652634, | |
| "learning_rate": 2.208696249564657e-07, | |
| "loss": 0.6592, | |
| "step": 1269 | |
| }, | |
| { | |
| "epoch": 1.9575944487278334, | |
| "grad_norm": 0.0474606491625309, | |
| "learning_rate": 2.0481737200322982e-07, | |
| "loss": 0.7511, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 1.9591364687740942, | |
| "grad_norm": 0.0381162092089653, | |
| "learning_rate": 1.893700527157205e-07, | |
| "loss": 0.6218, | |
| "step": 1271 | |
| }, | |
| { | |
| "epoch": 1.9606784888203546, | |
| "grad_norm": 0.043498676270246506, | |
| "learning_rate": 1.7452776073175747e-07, | |
| "loss": 0.7001, | |
| "step": 1272 | |
| }, | |
| { | |
| "epoch": 1.9622205088666154, | |
| "grad_norm": 0.03885050490498543, | |
| "learning_rate": 1.602905860216497e-07, | |
| "loss": 0.5694, | |
| "step": 1273 | |
| }, | |
| { | |
| "epoch": 1.9637625289128757, | |
| "grad_norm": 0.03537154197692871, | |
| "learning_rate": 1.4665861488761813e-07, | |
| "loss": 0.565, | |
| "step": 1274 | |
| }, | |
| { | |
| "epoch": 1.9653045489591365, | |
| "grad_norm": 0.03875518962740898, | |
| "learning_rate": 1.3363192996328488e-07, | |
| "loss": 0.6123, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 1.9668465690053971, | |
| "grad_norm": 0.035272713750600815, | |
| "learning_rate": 1.212106102131849e-07, | |
| "loss": 0.6273, | |
| "step": 1276 | |
| }, | |
| { | |
| "epoch": 1.9683885890516577, | |
| "grad_norm": 0.03568264842033386, | |
| "learning_rate": 1.0939473093229957e-07, | |
| "loss": 0.5812, | |
| "step": 1277 | |
| }, | |
| { | |
| "epoch": 1.9699306090979183, | |
| "grad_norm": 0.03698897734284401, | |
| "learning_rate": 9.818436374553486e-08, | |
| "loss": 0.5855, | |
| "step": 1278 | |
| }, | |
| { | |
| "epoch": 1.9714726291441789, | |
| "grad_norm": 0.03827029466629028, | |
| "learning_rate": 8.757957660737726e-08, | |
| "loss": 0.6363, | |
| "step": 1279 | |
| }, | |
| { | |
| "epoch": 1.9730146491904395, | |
| "grad_norm": 0.040307458490133286, | |
| "learning_rate": 7.758043380140523e-08, | |
| "loss": 0.7117, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 1.9745566692367, | |
| "grad_norm": 0.04253824055194855, | |
| "learning_rate": 6.818699593996724e-08, | |
| "loss": 0.6656, | |
| "step": 1281 | |
| }, | |
| { | |
| "epoch": 1.9760986892829608, | |
| "grad_norm": 0.03772187978029251, | |
| "learning_rate": 5.9399319963726606e-08, | |
| "loss": 0.6361, | |
| "step": 1282 | |
| }, | |
| { | |
| "epoch": 1.9776407093292212, | |
| "grad_norm": 0.04309052973985672, | |
| "learning_rate": 5.1217459141406074e-08, | |
| "loss": 0.6891, | |
| "step": 1283 | |
| }, | |
| { | |
| "epoch": 1.979182729375482, | |
| "grad_norm": 0.038349002599716187, | |
| "learning_rate": 4.364146306943262e-08, | |
| "loss": 0.6574, | |
| "step": 1284 | |
| }, | |
| { | |
| "epoch": 1.9807247494217424, | |
| "grad_norm": 0.03721684589982033, | |
| "learning_rate": 3.667137767160433e-08, | |
| "loss": 0.5985, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 1.9822667694680032, | |
| "grad_norm": 0.037083644419908524, | |
| "learning_rate": 3.0307245198857306e-08, | |
| "loss": 0.6305, | |
| "step": 1286 | |
| }, | |
| { | |
| "epoch": 1.9838087895142635, | |
| "grad_norm": 0.03527013584971428, | |
| "learning_rate": 2.4549104228976938e-08, | |
| "loss": 0.6381, | |
| "step": 1287 | |
| }, | |
| { | |
| "epoch": 1.9853508095605243, | |
| "grad_norm": 0.033729102462530136, | |
| "learning_rate": 1.9396989666398137e-08, | |
| "loss": 0.5759, | |
| "step": 1288 | |
| }, | |
| { | |
| "epoch": 1.986892829606785, | |
| "grad_norm": 0.04133991152048111, | |
| "learning_rate": 1.485093274194993e-08, | |
| "loss": 0.7305, | |
| "step": 1289 | |
| }, | |
| { | |
| "epoch": 1.9884348496530455, | |
| "grad_norm": 0.035960495471954346, | |
| "learning_rate": 1.0910961012711162e-08, | |
| "loss": 0.6279, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 1.989976869699306, | |
| "grad_norm": 0.0425616018474102, | |
| "learning_rate": 7.577098361810642e-09, | |
| "loss": 0.703, | |
| "step": 1291 | |
| }, | |
| { | |
| "epoch": 1.9915188897455667, | |
| "grad_norm": 0.04597880691289902, | |
| "learning_rate": 4.8493649983050174e-09, | |
| "loss": 0.714, | |
| "step": 1292 | |
| }, | |
| { | |
| "epoch": 1.9930609097918273, | |
| "grad_norm": 0.03732302784919739, | |
| "learning_rate": 2.7277774570233506e-09, | |
| "loss": 0.5934, | |
| "step": 1293 | |
| }, | |
| { | |
| "epoch": 1.9946029298380878, | |
| "grad_norm": 0.03861507400870323, | |
| "learning_rate": 1.2123485985227057e-09, | |
| "loss": 0.4761, | |
| "step": 1294 | |
| }, | |
| { | |
| "epoch": 1.9961449498843487, | |
| "grad_norm": 0.0389426052570343, | |
| "learning_rate": 3.030876089438195e-10, | |
| "loss": 0.56, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 1.997686969930609, | |
| "grad_norm": 0.0491202138364315, | |
| "learning_rate": 0.0, | |
| "loss": 0.6349, | |
| "step": 1296 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1296, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 324, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.7387631237958468e+19, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
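For readers who would rather summarize this log programmatically than scan it by hand, below is a minimal sketch of how the state above could be loaded and inspected. It assumes only that the JSON is saved as `trainer_state.json` (the filename the Hugging Face `Trainer` writes inside each checkpoint directory); the 50-step averaging window is an arbitrary illustrative choice, not anything recorded in the state itself.

```python
import json

# Load the trainer state (path is an assumption; the HF Trainer writes
# this file as trainer_state.json inside each checkpoint directory).
with open("trainer_state.json") as f:
    state = json.load(f)

# One dict per logged step, since logging_steps is 1 in this run.
history = state["log_history"]

# Final step, loss, and learning rate as recorded above
# (step 1296 of 1296, loss 0.6349, learning rate 0.0).
last = history[-1]
print(f"step {last['step']}/{state['max_steps']}: "
      f"loss={last['loss']}, lr={last['learning_rate']}")

# Mean loss over the last 50 steps as a rough convergence check.
tail = [e["loss"] for e in history[-50:] if "loss" in e]
print(f"mean loss over last {len(tail)} steps: {sum(tail) / len(tail):.4f}")
```

The `if "loss" in e` guard is redundant for this particular run, where every entry is a training step, but it keeps the sketch safe for trainer states that interleave evaluation records (which carry `eval_loss` instead of `loss`) into `log_history`.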