{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.0,
"eval_steps": 500,
"global_step": 3608,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.011092623405435386,
"grad_norm": 2.009918689727783,
"learning_rate": 1.998002219755827e-05,
"loss": 0.5022,
"step": 10
},
{
"epoch": 0.022185246810870772,
"grad_norm": 0.49915584921836853,
"learning_rate": 1.995782463928968e-05,
"loss": 0.1943,
"step": 20
},
{
"epoch": 0.033277870216306155,
"grad_norm": 0.47061625123023987,
"learning_rate": 1.993562708102109e-05,
"loss": 0.1436,
"step": 30
},
{
"epoch": 0.044370493621741544,
"grad_norm": 0.2260124832391739,
"learning_rate": 1.99134295227525e-05,
"loss": 0.1106,
"step": 40
},
{
"epoch": 0.05546311702717693,
"grad_norm": 0.31766048073768616,
"learning_rate": 1.989123196448391e-05,
"loss": 0.0994,
"step": 50
},
{
"epoch": 0.06655574043261231,
"grad_norm": 0.2788391709327698,
"learning_rate": 1.9869034406215316e-05,
"loss": 0.111,
"step": 60
},
{
"epoch": 0.0776483638380477,
"grad_norm": 0.24376386404037476,
"learning_rate": 1.9846836847946726e-05,
"loss": 0.0849,
"step": 70
},
{
"epoch": 0.08874098724348309,
"grad_norm": 0.1789788454771042,
"learning_rate": 1.982463928967814e-05,
"loss": 0.0971,
"step": 80
},
{
"epoch": 0.09983361064891846,
"grad_norm": 0.29698437452316284,
"learning_rate": 1.9802441731409546e-05,
"loss": 0.0856,
"step": 90
},
{
"epoch": 0.11092623405435385,
"grad_norm": 0.2553412616252899,
"learning_rate": 1.9780244173140956e-05,
"loss": 0.0976,
"step": 100
},
{
"epoch": 0.12201885745978924,
"grad_norm": 0.20608791708946228,
"learning_rate": 1.9758046614872365e-05,
"loss": 0.0947,
"step": 110
},
{
"epoch": 0.13311148086522462,
"grad_norm": 0.23551669716835022,
"learning_rate": 1.9735849056603775e-05,
"loss": 0.0965,
"step": 120
},
{
"epoch": 0.14420410427066002,
"grad_norm": 0.3207148611545563,
"learning_rate": 1.9713651498335185e-05,
"loss": 0.0931,
"step": 130
},
{
"epoch": 0.1552967276760954,
"grad_norm": 0.34813177585601807,
"learning_rate": 1.9691453940066595e-05,
"loss": 0.1,
"step": 140
},
{
"epoch": 0.16638935108153077,
"grad_norm": 0.41501104831695557,
"learning_rate": 1.9669256381798e-05,
"loss": 0.0922,
"step": 150
},
{
"epoch": 0.17748197448696618,
"grad_norm": 0.24550506472587585,
"learning_rate": 1.964705882352941e-05,
"loss": 0.0857,
"step": 160
},
{
"epoch": 0.18857459789240155,
"grad_norm": 0.4205056130886078,
"learning_rate": 1.9624861265260825e-05,
"loss": 0.0831,
"step": 170
},
{
"epoch": 0.19966722129783693,
"grad_norm": 0.6002993583679199,
"learning_rate": 1.9602663706992235e-05,
"loss": 0.0792,
"step": 180
},
{
"epoch": 0.21075984470327233,
"grad_norm": 0.27535638213157654,
"learning_rate": 1.958046614872364e-05,
"loss": 0.0786,
"step": 190
},
{
"epoch": 0.2218524681087077,
"grad_norm": 0.41602805256843567,
"learning_rate": 1.955826859045505e-05,
"loss": 0.0789,
"step": 200
},
{
"epoch": 0.23294509151414308,
"grad_norm": 0.41827332973480225,
"learning_rate": 1.953607103218646e-05,
"loss": 0.0663,
"step": 210
},
{
"epoch": 0.24403771491957849,
"grad_norm": 0.47976112365722656,
"learning_rate": 1.951387347391787e-05,
"loss": 0.0727,
"step": 220
},
{
"epoch": 0.25513033832501386,
"grad_norm": 0.7771281599998474,
"learning_rate": 1.949167591564928e-05,
"loss": 0.0776,
"step": 230
},
{
"epoch": 0.26622296173044924,
"grad_norm": 1.0573914051055908,
"learning_rate": 1.946947835738069e-05,
"loss": 0.0828,
"step": 240
},
{
"epoch": 0.2773155851358846,
"grad_norm": 0.5186040997505188,
"learning_rate": 1.9447280799112097e-05,
"loss": 0.0767,
"step": 250
},
{
"epoch": 0.28840820854132004,
"grad_norm": 0.5594862699508667,
"learning_rate": 1.942508324084351e-05,
"loss": 0.0699,
"step": 260
},
{
"epoch": 0.2995008319467554,
"grad_norm": 0.5147818922996521,
"learning_rate": 1.940288568257492e-05,
"loss": 0.0591,
"step": 270
},
{
"epoch": 0.3105934553521908,
"grad_norm": 0.5510437488555908,
"learning_rate": 1.938068812430633e-05,
"loss": 0.0761,
"step": 280
},
{
"epoch": 0.32168607875762617,
"grad_norm": 0.4817625880241394,
"learning_rate": 1.9358490566037736e-05,
"loss": 0.0688,
"step": 290
},
{
"epoch": 0.33277870216306155,
"grad_norm": 0.504362940788269,
"learning_rate": 1.9336293007769146e-05,
"loss": 0.0691,
"step": 300
},
{
"epoch": 0.343871325568497,
"grad_norm": 0.5732384920120239,
"learning_rate": 1.9314095449500556e-05,
"loss": 0.057,
"step": 310
},
{
"epoch": 0.35496394897393235,
"grad_norm": 0.5892298221588135,
"learning_rate": 1.9291897891231966e-05,
"loss": 0.059,
"step": 320
},
{
"epoch": 0.36605657237936773,
"grad_norm": 0.3680512309074402,
"learning_rate": 1.9269700332963376e-05,
"loss": 0.0706,
"step": 330
},
{
"epoch": 0.3771491957848031,
"grad_norm": 0.41971662640571594,
"learning_rate": 1.9247502774694786e-05,
"loss": 0.053,
"step": 340
},
{
"epoch": 0.3882418191902385,
"grad_norm": 0.37261390686035156,
"learning_rate": 1.9225305216426195e-05,
"loss": 0.0632,
"step": 350
},
{
"epoch": 0.39933444259567386,
"grad_norm": 0.48256734013557434,
"learning_rate": 1.9203107658157605e-05,
"loss": 0.0651,
"step": 360
},
{
"epoch": 0.4104270660011093,
"grad_norm": 0.7914339303970337,
"learning_rate": 1.9180910099889015e-05,
"loss": 0.0659,
"step": 370
},
{
"epoch": 0.42151968940654466,
"grad_norm": 0.6772429347038269,
"learning_rate": 1.915871254162042e-05,
"loss": 0.0655,
"step": 380
},
{
"epoch": 0.43261231281198004,
"grad_norm": 0.5621687173843384,
"learning_rate": 1.913651498335183e-05,
"loss": 0.0707,
"step": 390
},
{
"epoch": 0.4437049362174154,
"grad_norm": 0.30515748262405396,
"learning_rate": 1.911431742508324e-05,
"loss": 0.0548,
"step": 400
},
{
"epoch": 0.4547975596228508,
"grad_norm": 0.5506859421730042,
"learning_rate": 1.909211986681465e-05,
"loss": 0.0554,
"step": 410
},
{
"epoch": 0.46589018302828616,
"grad_norm": 0.6387749314308167,
"learning_rate": 1.906992230854606e-05,
"loss": 0.0554,
"step": 420
},
{
"epoch": 0.4769828064337216,
"grad_norm": 0.18500734865665436,
"learning_rate": 1.904772475027747e-05,
"loss": 0.0558,
"step": 430
},
{
"epoch": 0.48807542983915697,
"grad_norm": 0.6815407276153564,
"learning_rate": 1.902552719200888e-05,
"loss": 0.0537,
"step": 440
},
{
"epoch": 0.49916805324459235,
"grad_norm": 0.4826994836330414,
"learning_rate": 1.900332963374029e-05,
"loss": 0.0567,
"step": 450
},
{
"epoch": 0.5102606766500277,
"grad_norm": 0.24618124961853027,
"learning_rate": 1.89811320754717e-05,
"loss": 0.0614,
"step": 460
},
{
"epoch": 0.5213533000554631,
"grad_norm": 1.0037415027618408,
"learning_rate": 1.895893451720311e-05,
"loss": 0.0604,
"step": 470
},
{
"epoch": 0.5324459234608985,
"grad_norm": 0.6000948548316956,
"learning_rate": 1.8936736958934517e-05,
"loss": 0.0683,
"step": 480
},
{
"epoch": 0.5435385468663338,
"grad_norm": 0.3302474319934845,
"learning_rate": 1.8914539400665927e-05,
"loss": 0.0543,
"step": 490
},
{
"epoch": 0.5546311702717692,
"grad_norm": 0.5560783743858337,
"learning_rate": 1.8892341842397337e-05,
"loss": 0.0569,
"step": 500
},
{
"epoch": 0.5657237936772047,
"grad_norm": 0.8041097521781921,
"learning_rate": 1.8870144284128747e-05,
"loss": 0.0544,
"step": 510
},
{
"epoch": 0.5768164170826401,
"grad_norm": 0.6846103668212891,
"learning_rate": 1.8847946725860156e-05,
"loss": 0.058,
"step": 520
},
{
"epoch": 0.5879090404880755,
"grad_norm": 0.50434809923172,
"learning_rate": 1.8825749167591566e-05,
"loss": 0.0654,
"step": 530
},
{
"epoch": 0.5990016638935108,
"grad_norm": 0.54362553358078,
"learning_rate": 1.8803551609322976e-05,
"loss": 0.0582,
"step": 540
},
{
"epoch": 0.6100942872989462,
"grad_norm": 0.6166839599609375,
"learning_rate": 1.8781354051054386e-05,
"loss": 0.0672,
"step": 550
},
{
"epoch": 0.6211869107043816,
"grad_norm": 0.4353054165840149,
"learning_rate": 1.8759156492785796e-05,
"loss": 0.0532,
"step": 560
},
{
"epoch": 0.632279534109817,
"grad_norm": 0.6025580167770386,
"learning_rate": 1.8736958934517206e-05,
"loss": 0.0746,
"step": 570
},
{
"epoch": 0.6433721575152523,
"grad_norm": 0.7259892225265503,
"learning_rate": 1.8714761376248612e-05,
"loss": 0.0642,
"step": 580
},
{
"epoch": 0.6544647809206877,
"grad_norm": 0.4940318465232849,
"learning_rate": 1.8692563817980022e-05,
"loss": 0.0547,
"step": 590
},
{
"epoch": 0.6655574043261231,
"grad_norm": 0.7005699872970581,
"learning_rate": 1.8670366259711435e-05,
"loss": 0.0522,
"step": 600
},
{
"epoch": 0.6766500277315585,
"grad_norm": 0.4530707895755768,
"learning_rate": 1.8648168701442845e-05,
"loss": 0.054,
"step": 610
},
{
"epoch": 0.687742651136994,
"grad_norm": 0.9097110629081726,
"learning_rate": 1.8625971143174252e-05,
"loss": 0.0622,
"step": 620
},
{
"epoch": 0.6988352745424293,
"grad_norm": 0.5374599695205688,
"learning_rate": 1.860377358490566e-05,
"loss": 0.0522,
"step": 630
},
{
"epoch": 0.7099278979478647,
"grad_norm": 0.3849945664405823,
"learning_rate": 1.858157602663707e-05,
"loss": 0.0571,
"step": 640
},
{
"epoch": 0.7210205213533001,
"grad_norm": 0.5918008685112,
"learning_rate": 1.855937846836848e-05,
"loss": 0.0576,
"step": 650
},
{
"epoch": 0.7321131447587355,
"grad_norm": 0.3229956030845642,
"learning_rate": 1.853718091009989e-05,
"loss": 0.0537,
"step": 660
},
{
"epoch": 0.7432057681641708,
"grad_norm": 0.5264039039611816,
"learning_rate": 1.85149833518313e-05,
"loss": 0.06,
"step": 670
},
{
"epoch": 0.7542983915696062,
"grad_norm": 0.36795660853385925,
"learning_rate": 1.8492785793562708e-05,
"loss": 0.0511,
"step": 680
},
{
"epoch": 0.7653910149750416,
"grad_norm": 0.5905130505561829,
"learning_rate": 1.847058823529412e-05,
"loss": 0.0636,
"step": 690
},
{
"epoch": 0.776483638380477,
"grad_norm": 0.36266571283340454,
"learning_rate": 1.844839067702553e-05,
"loss": 0.0598,
"step": 700
},
{
"epoch": 0.7875762617859123,
"grad_norm": 0.4978592097759247,
"learning_rate": 1.8426193118756937e-05,
"loss": 0.0607,
"step": 710
},
{
"epoch": 0.7986688851913477,
"grad_norm": 0.4635021686553955,
"learning_rate": 1.8403995560488347e-05,
"loss": 0.0543,
"step": 720
},
{
"epoch": 0.8097615085967831,
"grad_norm": 0.44571858644485474,
"learning_rate": 1.8381798002219757e-05,
"loss": 0.0678,
"step": 730
},
{
"epoch": 0.8208541320022186,
"grad_norm": 0.8265877366065979,
"learning_rate": 1.8359600443951167e-05,
"loss": 0.0552,
"step": 740
},
{
"epoch": 0.831946755407654,
"grad_norm": 0.5776472091674805,
"learning_rate": 1.8337402885682577e-05,
"loss": 0.0505,
"step": 750
},
{
"epoch": 0.8430393788130893,
"grad_norm": 0.299274742603302,
"learning_rate": 1.8315205327413986e-05,
"loss": 0.0619,
"step": 760
},
{
"epoch": 0.8541320022185247,
"grad_norm": 0.587645947933197,
"learning_rate": 1.8293007769145393e-05,
"loss": 0.0575,
"step": 770
},
{
"epoch": 0.8652246256239601,
"grad_norm": 0.39164137840270996,
"learning_rate": 1.8270810210876806e-05,
"loss": 0.0458,
"step": 780
},
{
"epoch": 0.8763172490293955,
"grad_norm": 0.4663292169570923,
"learning_rate": 1.8248612652608216e-05,
"loss": 0.048,
"step": 790
},
{
"epoch": 0.8874098724348308,
"grad_norm": 0.5804581642150879,
"learning_rate": 1.8226415094339626e-05,
"loss": 0.0557,
"step": 800
},
{
"epoch": 0.8985024958402662,
"grad_norm": 0.4279440939426422,
"learning_rate": 1.8204217536071032e-05,
"loss": 0.051,
"step": 810
},
{
"epoch": 0.9095951192457016,
"grad_norm": 0.5384302735328674,
"learning_rate": 1.8182019977802442e-05,
"loss": 0.0558,
"step": 820
},
{
"epoch": 0.920687742651137,
"grad_norm": 0.5049973726272583,
"learning_rate": 1.8159822419533852e-05,
"loss": 0.047,
"step": 830
},
{
"epoch": 0.9317803660565723,
"grad_norm": 0.8061177134513855,
"learning_rate": 1.8137624861265262e-05,
"loss": 0.0572,
"step": 840
},
{
"epoch": 0.9428729894620078,
"grad_norm": 0.6056540012359619,
"learning_rate": 1.8115427302996672e-05,
"loss": 0.0539,
"step": 850
},
{
"epoch": 0.9539656128674432,
"grad_norm": 0.3001384139060974,
"learning_rate": 1.8093229744728082e-05,
"loss": 0.053,
"step": 860
},
{
"epoch": 0.9650582362728786,
"grad_norm": 0.6709749102592468,
"learning_rate": 1.807103218645949e-05,
"loss": 0.0526,
"step": 870
},
{
"epoch": 0.9761508596783139,
"grad_norm": 0.8233507871627808,
"learning_rate": 1.80488346281909e-05,
"loss": 0.0621,
"step": 880
},
{
"epoch": 0.9872434830837493,
"grad_norm": 0.5757150650024414,
"learning_rate": 1.802663706992231e-05,
"loss": 0.0546,
"step": 890
},
{
"epoch": 0.9983361064891847,
"grad_norm": 0.6834889054298401,
"learning_rate": 1.800443951165372e-05,
"loss": 0.0462,
"step": 900
},
{
"epoch": 1.0,
"eval_accuracy": 0.8720027017899359,
"eval_f1": 0.5678449258836944,
"eval_loss": 0.34414270520210266,
"eval_precision": 0.7929936305732485,
"eval_recall": 0.4422735346358792,
"eval_runtime": 2.727,
"eval_samples_per_second": 361.937,
"eval_steps_per_second": 11.368,
"step": 902
},
{
"epoch": 1.0088740987243483,
"grad_norm": 0.4411003887653351,
"learning_rate": 1.7982241953385128e-05,
"loss": 0.0312,
"step": 910
},
{
"epoch": 1.0199667221297837,
"grad_norm": 0.40500083565711975,
"learning_rate": 1.7960044395116538e-05,
"loss": 0.0345,
"step": 920
},
{
"epoch": 1.031059345535219,
"grad_norm": 0.9287449717521667,
"learning_rate": 1.7937846836847947e-05,
"loss": 0.0419,
"step": 930
},
{
"epoch": 1.0421519689406544,
"grad_norm": 0.7969145178794861,
"learning_rate": 1.7915649278579357e-05,
"loss": 0.042,
"step": 940
},
{
"epoch": 1.0532445923460898,
"grad_norm": 0.49280259013175964,
"learning_rate": 1.7893451720310767e-05,
"loss": 0.0427,
"step": 950
},
{
"epoch": 1.0643372157515252,
"grad_norm": 1.1647624969482422,
"learning_rate": 1.7871254162042177e-05,
"loss": 0.0341,
"step": 960
},
{
"epoch": 1.0754298391569606,
"grad_norm": 0.2908968925476074,
"learning_rate": 1.7849056603773587e-05,
"loss": 0.0389,
"step": 970
},
{
"epoch": 1.086522462562396,
"grad_norm": 0.521218478679657,
"learning_rate": 1.7826859045504997e-05,
"loss": 0.0396,
"step": 980
},
{
"epoch": 1.0976150859678313,
"grad_norm": 0.4156598150730133,
"learning_rate": 1.7804661487236407e-05,
"loss": 0.0359,
"step": 990
},
{
"epoch": 1.1087077093732667,
"grad_norm": 0.7260242104530334,
"learning_rate": 1.7782463928967813e-05,
"loss": 0.0384,
"step": 1000
},
{
"epoch": 1.119800332778702,
"grad_norm": 0.43658459186553955,
"learning_rate": 1.7760266370699223e-05,
"loss": 0.0383,
"step": 1010
},
{
"epoch": 1.1308929561841374,
"grad_norm": 0.3585101068019867,
"learning_rate": 1.7738068812430633e-05,
"loss": 0.0401,
"step": 1020
},
{
"epoch": 1.141985579589573,
"grad_norm": 0.5010389089584351,
"learning_rate": 1.7715871254162043e-05,
"loss": 0.0307,
"step": 1030
},
{
"epoch": 1.1530782029950084,
"grad_norm": 0.6415812969207764,
"learning_rate": 1.7693673695893453e-05,
"loss": 0.0423,
"step": 1040
},
{
"epoch": 1.1641708264004438,
"grad_norm": 0.6393259763717651,
"learning_rate": 1.7671476137624862e-05,
"loss": 0.0392,
"step": 1050
},
{
"epoch": 1.1752634498058792,
"grad_norm": 0.6339041590690613,
"learning_rate": 1.7649278579356272e-05,
"loss": 0.0506,
"step": 1060
},
{
"epoch": 1.1863560732113145,
"grad_norm": 0.6086763143539429,
"learning_rate": 1.7627081021087682e-05,
"loss": 0.0416,
"step": 1070
},
{
"epoch": 1.19744869661675,
"grad_norm": 0.6648682355880737,
"learning_rate": 1.7604883462819092e-05,
"loss": 0.0379,
"step": 1080
},
{
"epoch": 1.2085413200221853,
"grad_norm": 0.6018221378326416,
"learning_rate": 1.7582685904550502e-05,
"loss": 0.0354,
"step": 1090
},
{
"epoch": 1.2196339434276207,
"grad_norm": 1.2366654872894287,
"learning_rate": 1.756048834628191e-05,
"loss": 0.037,
"step": 1100
},
{
"epoch": 1.230726566833056,
"grad_norm": 0.5976310968399048,
"learning_rate": 1.7538290788013318e-05,
"loss": 0.0361,
"step": 1110
},
{
"epoch": 1.2418191902384914,
"grad_norm": 0.8290308117866516,
"learning_rate": 1.751609322974473e-05,
"loss": 0.0383,
"step": 1120
},
{
"epoch": 1.2529118136439268,
"grad_norm": 0.382548987865448,
"learning_rate": 1.749389567147614e-05,
"loss": 0.0449,
"step": 1130
},
{
"epoch": 1.2640044370493622,
"grad_norm": 0.5307976007461548,
"learning_rate": 1.7471698113207548e-05,
"loss": 0.0364,
"step": 1140
},
{
"epoch": 1.2750970604547975,
"grad_norm": 0.5508521795272827,
"learning_rate": 1.7449500554938958e-05,
"loss": 0.0312,
"step": 1150
},
{
"epoch": 1.286189683860233,
"grad_norm": 0.37057268619537354,
"learning_rate": 1.7427302996670368e-05,
"loss": 0.0397,
"step": 1160
},
{
"epoch": 1.2972823072656683,
"grad_norm": 0.4806898236274719,
"learning_rate": 1.7405105438401777e-05,
"loss": 0.0391,
"step": 1170
},
{
"epoch": 1.3083749306711037,
"grad_norm": 0.759772002696991,
"learning_rate": 1.7382907880133187e-05,
"loss": 0.0322,
"step": 1180
},
{
"epoch": 1.319467554076539,
"grad_norm": 0.39819085597991943,
"learning_rate": 1.7360710321864597e-05,
"loss": 0.0345,
"step": 1190
},
{
"epoch": 1.3305601774819744,
"grad_norm": 0.7733897566795349,
"learning_rate": 1.7338512763596004e-05,
"loss": 0.0424,
"step": 1200
},
{
"epoch": 1.3416528008874098,
"grad_norm": 0.7979075908660889,
"learning_rate": 1.7316315205327417e-05,
"loss": 0.0403,
"step": 1210
},
{
"epoch": 1.3527454242928454,
"grad_norm": 0.5658752918243408,
"learning_rate": 1.7294117647058827e-05,
"loss": 0.0276,
"step": 1220
},
{
"epoch": 1.3638380476982808,
"grad_norm": 0.25207844376564026,
"learning_rate": 1.7271920088790237e-05,
"loss": 0.0347,
"step": 1230
},
{
"epoch": 1.3749306711037161,
"grad_norm": 0.6338945031166077,
"learning_rate": 1.7249722530521643e-05,
"loss": 0.0416,
"step": 1240
},
{
"epoch": 1.3860232945091515,
"grad_norm": 0.8125913143157959,
"learning_rate": 1.7227524972253053e-05,
"loss": 0.0442,
"step": 1250
},
{
"epoch": 1.397115917914587,
"grad_norm": 0.5067325234413147,
"learning_rate": 1.7205327413984463e-05,
"loss": 0.0358,
"step": 1260
},
{
"epoch": 1.4082085413200223,
"grad_norm": 0.40132638812065125,
"learning_rate": 1.7183129855715873e-05,
"loss": 0.0303,
"step": 1270
},
{
"epoch": 1.4193011647254576,
"grad_norm": 0.6337258815765381,
"learning_rate": 1.7160932297447283e-05,
"loss": 0.0471,
"step": 1280
},
{
"epoch": 1.430393788130893,
"grad_norm": 0.43456393480300903,
"learning_rate": 1.713873473917869e-05,
"loss": 0.0354,
"step": 1290
},
{
"epoch": 1.4414864115363284,
"grad_norm": 0.3291069269180298,
"learning_rate": 1.7116537180910102e-05,
"loss": 0.0312,
"step": 1300
},
{
"epoch": 1.4525790349417638,
"grad_norm": 0.2960388660430908,
"learning_rate": 1.7094339622641512e-05,
"loss": 0.0362,
"step": 1310
},
{
"epoch": 1.4636716583471991,
"grad_norm": 0.820751428604126,
"learning_rate": 1.7072142064372922e-05,
"loss": 0.0396,
"step": 1320
},
{
"epoch": 1.4747642817526345,
"grad_norm": 0.7335907816886902,
"learning_rate": 1.704994450610433e-05,
"loss": 0.0386,
"step": 1330
},
{
"epoch": 1.48585690515807,
"grad_norm": 0.40592002868652344,
"learning_rate": 1.702774694783574e-05,
"loss": 0.0358,
"step": 1340
},
{
"epoch": 1.4969495285635053,
"grad_norm": 0.6194770336151123,
"learning_rate": 1.7005549389567148e-05,
"loss": 0.0361,
"step": 1350
},
{
"epoch": 1.5080421519689406,
"grad_norm": 0.44590774178504944,
"learning_rate": 1.6983351831298558e-05,
"loss": 0.033,
"step": 1360
},
{
"epoch": 1.519134775374376,
"grad_norm": 0.5857370495796204,
"learning_rate": 1.6961154273029968e-05,
"loss": 0.0423,
"step": 1370
},
{
"epoch": 1.5302273987798114,
"grad_norm": 0.6809953451156616,
"learning_rate": 1.6938956714761378e-05,
"loss": 0.0345,
"step": 1380
},
{
"epoch": 1.5413200221852468,
"grad_norm": 0.7102778553962708,
"learning_rate": 1.6916759156492788e-05,
"loss": 0.0372,
"step": 1390
},
{
"epoch": 1.5524126455906821,
"grad_norm": 0.31781554222106934,
"learning_rate": 1.6894561598224198e-05,
"loss": 0.032,
"step": 1400
},
{
"epoch": 1.5635052689961175,
"grad_norm": 0.4304943382740021,
"learning_rate": 1.6872364039955607e-05,
"loss": 0.0375,
"step": 1410
},
{
"epoch": 1.574597892401553,
"grad_norm": 0.918550968170166,
"learning_rate": 1.6850166481687017e-05,
"loss": 0.0416,
"step": 1420
},
{
"epoch": 1.5856905158069883,
"grad_norm": 0.722892701625824,
"learning_rate": 1.6827968923418424e-05,
"loss": 0.0359,
"step": 1430
},
{
"epoch": 1.5967831392124237,
"grad_norm": 0.508703351020813,
"learning_rate": 1.6805771365149834e-05,
"loss": 0.0402,
"step": 1440
},
{
"epoch": 1.607875762617859,
"grad_norm": 0.4725389778614044,
"learning_rate": 1.6783573806881244e-05,
"loss": 0.0335,
"step": 1450
},
{
"epoch": 1.6189683860232944,
"grad_norm": 0.4699971079826355,
"learning_rate": 1.6761376248612653e-05,
"loss": 0.0277,
"step": 1460
},
{
"epoch": 1.6300610094287298,
"grad_norm": 0.775764524936676,
"learning_rate": 1.6739178690344063e-05,
"loss": 0.0333,
"step": 1470
},
{
"epoch": 1.6411536328341652,
"grad_norm": 0.6115106344223022,
"learning_rate": 1.6716981132075473e-05,
"loss": 0.0301,
"step": 1480
},
{
"epoch": 1.6522462562396005,
"grad_norm": 0.743077278137207,
"learning_rate": 1.6694783573806883e-05,
"loss": 0.035,
"step": 1490
},
{
"epoch": 1.663338879645036,
"grad_norm": 0.5189201235771179,
"learning_rate": 1.6672586015538293e-05,
"loss": 0.0475,
"step": 1500
},
{
"epoch": 1.6744315030504713,
"grad_norm": 0.5089607238769531,
"learning_rate": 1.6650388457269703e-05,
"loss": 0.0377,
"step": 1510
},
{
"epoch": 1.6855241264559067,
"grad_norm": 0.46067437529563904,
"learning_rate": 1.6628190899001113e-05,
"loss": 0.0297,
"step": 1520
},
{
"epoch": 1.6966167498613423,
"grad_norm": 0.5661717653274536,
"learning_rate": 1.660599334073252e-05,
"loss": 0.0372,
"step": 1530
},
{
"epoch": 1.7077093732667776,
"grad_norm": 0.45938414335250854,
"learning_rate": 1.658379578246393e-05,
"loss": 0.0371,
"step": 1540
},
{
"epoch": 1.718801996672213,
"grad_norm": 0.7390128970146179,
"learning_rate": 1.656159822419534e-05,
"loss": 0.0332,
"step": 1550
},
{
"epoch": 1.7298946200776484,
"grad_norm": 0.6475571990013123,
"learning_rate": 1.653940066592675e-05,
"loss": 0.0352,
"step": 1560
},
{
"epoch": 1.7409872434830838,
"grad_norm": 0.8529049754142761,
"learning_rate": 1.651720310765816e-05,
"loss": 0.0386,
"step": 1570
},
{
"epoch": 1.7520798668885191,
"grad_norm": 0.6980950236320496,
"learning_rate": 1.649500554938957e-05,
"loss": 0.0358,
"step": 1580
},
{
"epoch": 1.7631724902939545,
"grad_norm": 0.5858293175697327,
"learning_rate": 1.6472807991120978e-05,
"loss": 0.0418,
"step": 1590
},
{
"epoch": 1.7742651136993899,
"grad_norm": 0.49686577916145325,
"learning_rate": 1.6450610432852388e-05,
"loss": 0.0361,
"step": 1600
},
{
"epoch": 1.7853577371048253,
"grad_norm": 0.3631349205970764,
"learning_rate": 1.6428412874583798e-05,
"loss": 0.0352,
"step": 1610
},
{
"epoch": 1.7964503605102606,
"grad_norm": 0.5329940915107727,
"learning_rate": 1.6406215316315204e-05,
"loss": 0.04,
"step": 1620
},
{
"epoch": 1.807542983915696,
"grad_norm": 0.5995050668716431,
"learning_rate": 1.6384017758046614e-05,
"loss": 0.0368,
"step": 1630
},
{
"epoch": 1.8186356073211316,
"grad_norm": 1.3379504680633545,
"learning_rate": 1.6361820199778028e-05,
"loss": 0.0397,
"step": 1640
},
{
"epoch": 1.829728230726567,
"grad_norm": 0.8026002645492554,
"learning_rate": 1.6339622641509437e-05,
"loss": 0.0318,
"step": 1650
},
{
"epoch": 1.8408208541320024,
"grad_norm": 0.917950451374054,
"learning_rate": 1.6317425083240844e-05,
"loss": 0.0352,
"step": 1660
},
{
"epoch": 1.8519134775374377,
"grad_norm": 0.6763226389884949,
"learning_rate": 1.6295227524972254e-05,
"loss": 0.0413,
"step": 1670
},
{
"epoch": 1.8630061009428731,
"grad_norm": 0.5728912949562073,
"learning_rate": 1.6273029966703664e-05,
"loss": 0.0291,
"step": 1680
},
{
"epoch": 1.8740987243483085,
"grad_norm": 0.6339443325996399,
"learning_rate": 1.6250832408435074e-05,
"loss": 0.0409,
"step": 1690
},
{
"epoch": 1.8851913477537439,
"grad_norm": 0.6930853128433228,
"learning_rate": 1.6228634850166483e-05,
"loss": 0.0371,
"step": 1700
},
{
"epoch": 1.8962839711591792,
"grad_norm": 0.37715452909469604,
"learning_rate": 1.6206437291897893e-05,
"loss": 0.031,
"step": 1710
},
{
"epoch": 1.9073765945646146,
"grad_norm": 0.5656572580337524,
"learning_rate": 1.61842397336293e-05,
"loss": 0.0414,
"step": 1720
},
{
"epoch": 1.91846921797005,
"grad_norm": 0.3563915491104126,
"learning_rate": 1.6162042175360713e-05,
"loss": 0.0276,
"step": 1730
},
{
"epoch": 1.9295618413754854,
"grad_norm": 0.8260300159454346,
"learning_rate": 1.6139844617092123e-05,
"loss": 0.0337,
"step": 1740
},
{
"epoch": 1.9406544647809207,
"grad_norm": 0.6907551884651184,
"learning_rate": 1.6117647058823533e-05,
"loss": 0.031,
"step": 1750
},
{
"epoch": 1.9517470881863561,
"grad_norm": 0.5782826542854309,
"learning_rate": 1.609544950055494e-05,
"loss": 0.0419,
"step": 1760
},
{
"epoch": 1.9628397115917915,
"grad_norm": 0.36368125677108765,
"learning_rate": 1.607325194228635e-05,
"loss": 0.0356,
"step": 1770
},
{
"epoch": 1.9739323349972269,
"grad_norm": 0.369911789894104,
"learning_rate": 1.605105438401776e-05,
"loss": 0.0264,
"step": 1780
},
{
"epoch": 1.9850249584026622,
"grad_norm": 0.6072190999984741,
"learning_rate": 1.602885682574917e-05,
"loss": 0.0366,
"step": 1790
},
{
"epoch": 1.9961175818080976,
"grad_norm": 0.9733797311782837,
"learning_rate": 1.600665926748058e-05,
"loss": 0.0371,
"step": 1800
},
{
"epoch": 2.0,
"eval_accuracy": 0.8973319824383654,
"eval_f1": 0.6872427983539094,
"eval_loss": 0.31070804595947266,
"eval_precision": 0.8166259168704156,
"eval_recall": 0.5932504440497336,
"eval_runtime": 2.7114,
"eval_samples_per_second": 364.014,
"eval_steps_per_second": 11.433,
"step": 1804
},
{
"epoch": 2.0066555740432612,
"grad_norm": 0.2766992449760437,
"learning_rate": 1.598446170921199e-05,
"loss": 0.0218,
"step": 1810
},
{
"epoch": 2.0177481974486966,
"grad_norm": 0.6556938290596008,
"learning_rate": 1.59622641509434e-05,
"loss": 0.0187,
"step": 1820
},
{
"epoch": 2.028840820854132,
"grad_norm": 0.24634911119937897,
"learning_rate": 1.5940066592674808e-05,
"loss": 0.0132,
"step": 1830
},
{
"epoch": 2.0399334442595674,
"grad_norm": 0.683587372303009,
"learning_rate": 1.5917869034406218e-05,
"loss": 0.0158,
"step": 1840
},
{
"epoch": 2.0510260676650027,
"grad_norm": 0.47117751836776733,
"learning_rate": 1.5895671476137625e-05,
"loss": 0.018,
"step": 1850
},
{
"epoch": 2.062118691070438,
"grad_norm": 0.4102018475532532,
"learning_rate": 1.5873473917869034e-05,
"loss": 0.0153,
"step": 1860
},
{
"epoch": 2.0732113144758735,
"grad_norm": 0.4639064371585846,
"learning_rate": 1.5851276359600444e-05,
"loss": 0.0171,
"step": 1870
},
{
"epoch": 2.084303937881309,
"grad_norm": 0.49497199058532715,
"learning_rate": 1.5829078801331854e-05,
"loss": 0.0168,
"step": 1880
},
{
"epoch": 2.0953965612867442,
"grad_norm": 0.6302080154418945,
"learning_rate": 1.5806881243063264e-05,
"loss": 0.0191,
"step": 1890
},
{
"epoch": 2.1064891846921796,
"grad_norm": 0.3464473783969879,
"learning_rate": 1.5784683684794674e-05,
"loss": 0.0184,
"step": 1900
},
{
"epoch": 2.117581808097615,
"grad_norm": 0.6507964730262756,
"learning_rate": 1.5762486126526084e-05,
"loss": 0.0135,
"step": 1910
},
{
"epoch": 2.1286744315030504,
"grad_norm": 0.1759006232023239,
"learning_rate": 1.5740288568257494e-05,
"loss": 0.0118,
"step": 1920
},
{
"epoch": 2.1397670549084857,
"grad_norm": 0.5952832698822021,
"learning_rate": 1.5718091009988904e-05,
"loss": 0.0157,
"step": 1930
},
{
"epoch": 2.150859678313921,
"grad_norm": 0.8094580769538879,
"learning_rate": 1.5695893451720313e-05,
"loss": 0.0125,
"step": 1940
},
{
"epoch": 2.1619523017193565,
"grad_norm": 0.6284286379814148,
"learning_rate": 1.567369589345172e-05,
"loss": 0.0178,
"step": 1950
},
{
"epoch": 2.173044925124792,
"grad_norm": 0.6017957925796509,
"learning_rate": 1.565149833518313e-05,
"loss": 0.0136,
"step": 1960
},
{
"epoch": 2.1841375485302272,
"grad_norm": 1.2836827039718628,
"learning_rate": 1.562930077691454e-05,
"loss": 0.014,
"step": 1970
},
{
"epoch": 2.1952301719356626,
"grad_norm": 0.6213756203651428,
"learning_rate": 1.560710321864595e-05,
"loss": 0.0136,
"step": 1980
},
{
"epoch": 2.206322795341098,
"grad_norm": 0.48122018575668335,
"learning_rate": 1.558490566037736e-05,
"loss": 0.0147,
"step": 1990
},
{
"epoch": 2.2174154187465334,
"grad_norm": 0.7161231637001038,
"learning_rate": 1.556270810210877e-05,
"loss": 0.0105,
"step": 2000
},
{
"epoch": 2.2285080421519687,
"grad_norm": 0.6598195433616638,
"learning_rate": 1.554051054384018e-05,
"loss": 0.0172,
"step": 2010
},
{
"epoch": 2.239600665557404,
"grad_norm": 0.6077088117599487,
"learning_rate": 1.551831298557159e-05,
"loss": 0.0144,
"step": 2020
},
{
"epoch": 2.2506932889628395,
"grad_norm": 0.5622262954711914,
"learning_rate": 1.5496115427303e-05,
"loss": 0.0175,
"step": 2030
},
{
"epoch": 2.261785912368275,
"grad_norm": 0.1281885802745819,
"learning_rate": 1.547391786903441e-05,
"loss": 0.018,
"step": 2040
},
{
"epoch": 2.2728785357737102,
"grad_norm": 0.408607542514801,
"learning_rate": 1.5451720310765815e-05,
"loss": 0.0095,
"step": 2050
},
{
"epoch": 2.283971159179146,
"grad_norm": 0.7786557674407959,
"learning_rate": 1.5429522752497225e-05,
"loss": 0.0163,
"step": 2060
},
{
"epoch": 2.2950637825845814,
"grad_norm": 0.22975876927375793,
"learning_rate": 1.5407325194228635e-05,
"loss": 0.0193,
"step": 2070
},
{
"epoch": 2.306156405990017,
"grad_norm": 0.4313502311706543,
"learning_rate": 1.5385127635960048e-05,
"loss": 0.0186,
"step": 2080
},
{
"epoch": 2.317249029395452,
"grad_norm": 0.794448733329773,
"learning_rate": 1.5362930077691455e-05,
"loss": 0.0159,
"step": 2090
},
{
"epoch": 2.3283416528008876,
"grad_norm": 0.4191996157169342,
"learning_rate": 1.5340732519422865e-05,
"loss": 0.0208,
"step": 2100
},
{
"epoch": 2.339434276206323,
"grad_norm": 0.5219516754150391,
"learning_rate": 1.5318534961154274e-05,
"loss": 0.0136,
"step": 2110
},
{
"epoch": 2.3505268996117583,
"grad_norm": 1.1869897842407227,
"learning_rate": 1.5296337402885684e-05,
"loss": 0.0177,
"step": 2120
},
{
"epoch": 2.3616195230171937,
"grad_norm": 0.12048923969268799,
"learning_rate": 1.5274139844617094e-05,
"loss": 0.0129,
"step": 2130
},
{
"epoch": 2.372712146422629,
"grad_norm": 0.5213373899459839,
"learning_rate": 1.5251942286348502e-05,
"loss": 0.0127,
"step": 2140
},
{
"epoch": 2.3838047698280644,
"grad_norm": 0.43704670667648315,
"learning_rate": 1.5229744728079912e-05,
"loss": 0.0154,
"step": 2150
},
{
"epoch": 2.3948973932335,
"grad_norm": 0.567150890827179,
"learning_rate": 1.5207547169811324e-05,
"loss": 0.0117,
"step": 2160
},
{
"epoch": 2.405990016638935,
"grad_norm": 0.571408212184906,
"learning_rate": 1.5185349611542732e-05,
"loss": 0.0152,
"step": 2170
},
{
"epoch": 2.4170826400443706,
"grad_norm": 0.524834394454956,
"learning_rate": 1.5163152053274142e-05,
"loss": 0.017,
"step": 2180
},
{
"epoch": 2.428175263449806,
"grad_norm": 0.5026165246963501,
"learning_rate": 1.5140954495005552e-05,
"loss": 0.0193,
"step": 2190
},
{
"epoch": 2.4392678868552413,
"grad_norm": 0.3944782614707947,
"learning_rate": 1.511875693673696e-05,
"loss": 0.0159,
"step": 2200
},
{
"epoch": 2.4503605102606767,
"grad_norm": 0.6460635662078857,
"learning_rate": 1.509655937846837e-05,
"loss": 0.0139,
"step": 2210
},
{
"epoch": 2.461453133666112,
"grad_norm": 0.5573295950889587,
"learning_rate": 1.507436182019978e-05,
"loss": 0.0183,
"step": 2220
},
{
"epoch": 2.4725457570715474,
"grad_norm": 0.5001458525657654,
"learning_rate": 1.5052164261931188e-05,
"loss": 0.0167,
"step": 2230
},
{
"epoch": 2.483638380476983,
"grad_norm": 1.0214160680770874,
"learning_rate": 1.5029966703662598e-05,
"loss": 0.0182,
"step": 2240
},
{
"epoch": 2.494731003882418,
"grad_norm": 0.5756349563598633,
"learning_rate": 1.5007769145394009e-05,
"loss": 0.0164,
"step": 2250
},
{
"epoch": 2.5058236272878536,
"grad_norm": 0.771986722946167,
"learning_rate": 1.4985571587125419e-05,
"loss": 0.0183,
"step": 2260
},
{
"epoch": 2.516916250693289,
"grad_norm": 0.5560303926467896,
"learning_rate": 1.4963374028856827e-05,
"loss": 0.0178,
"step": 2270
},
{
"epoch": 2.5280088740987243,
"grad_norm": 0.8550804853439331,
"learning_rate": 1.4941176470588237e-05,
"loss": 0.0118,
"step": 2280
},
{
"epoch": 2.5391014975041597,
"grad_norm": 0.8129355907440186,
"learning_rate": 1.4918978912319645e-05,
"loss": 0.0126,
"step": 2290
},
{
"epoch": 2.550194120909595,
"grad_norm": 1.176377773284912,
"learning_rate": 1.4896781354051055e-05,
"loss": 0.0126,
"step": 2300
},
{
"epoch": 2.5612867443150305,
"grad_norm": 0.1419341266155243,
"learning_rate": 1.4874583795782465e-05,
"loss": 0.0196,
"step": 2310
},
{
"epoch": 2.572379367720466,
"grad_norm": 1.4311761856079102,
"learning_rate": 1.4852386237513873e-05,
"loss": 0.0143,
"step": 2320
},
{
"epoch": 2.583471991125901,
"grad_norm": 0.9924690127372742,
"learning_rate": 1.4830188679245283e-05,
"loss": 0.0198,
"step": 2330
},
{
"epoch": 2.5945646145313366,
"grad_norm": 0.6496040225028992,
"learning_rate": 1.4807991120976695e-05,
"loss": 0.014,
"step": 2340
},
{
"epoch": 2.605657237936772,
"grad_norm": 0.2335842400789261,
"learning_rate": 1.4785793562708104e-05,
"loss": 0.0128,
"step": 2350
},
{
"epoch": 2.6167498613422073,
"grad_norm": 0.6549142003059387,
"learning_rate": 1.4763596004439513e-05,
"loss": 0.019,
"step": 2360
},
{
"epoch": 2.6278424847476427,
"grad_norm": 0.3264187276363373,
"learning_rate": 1.4741398446170922e-05,
"loss": 0.0168,
"step": 2370
},
{
"epoch": 2.638935108153078,
"grad_norm": 0.4175679683685303,
"learning_rate": 1.4719200887902332e-05,
"loss": 0.0122,
"step": 2380
},
{
"epoch": 2.6500277315585135,
"grad_norm": 0.2968366742134094,
"learning_rate": 1.469700332963374e-05,
"loss": 0.0189,
"step": 2390
},
{
"epoch": 2.661120354963949,
"grad_norm": 1.5489327907562256,
"learning_rate": 1.467480577136515e-05,
"loss": 0.0158,
"step": 2400
},
{
"epoch": 2.672212978369384,
"grad_norm": 0.8017039895057678,
"learning_rate": 1.465260821309656e-05,
"loss": 0.0178,
"step": 2410
},
{
"epoch": 2.6833056017748196,
"grad_norm": 0.6136099100112915,
"learning_rate": 1.4630410654827972e-05,
"loss": 0.0164,
"step": 2420
},
{
"epoch": 2.6943982251802554,
"grad_norm": 0.5461011528968811,
"learning_rate": 1.460821309655938e-05,
"loss": 0.0168,
"step": 2430
},
{
"epoch": 2.7054908485856908,
"grad_norm": 0.2680525779724121,
"learning_rate": 1.458601553829079e-05,
"loss": 0.0138,
"step": 2440
},
{
"epoch": 2.716583471991126,
"grad_norm": 0.6884289383888245,
"learning_rate": 1.45638179800222e-05,
"loss": 0.0145,
"step": 2450
},
{
"epoch": 2.7276760953965615,
"grad_norm": 1.226928472518921,
"learning_rate": 1.4541620421753608e-05,
"loss": 0.0145,
"step": 2460
},
{
"epoch": 2.738768718801997,
"grad_norm": 0.447221040725708,
"learning_rate": 1.4519422863485018e-05,
"loss": 0.0168,
"step": 2470
},
{
"epoch": 2.7498613422074323,
"grad_norm": 0.47167718410491943,
"learning_rate": 1.4497225305216428e-05,
"loss": 0.0124,
"step": 2480
},
{
"epoch": 2.7609539656128677,
"grad_norm": 1.326282262802124,
"learning_rate": 1.4475027746947836e-05,
"loss": 0.0183,
"step": 2490
},
{
"epoch": 2.772046589018303,
"grad_norm": 0.9036744236946106,
"learning_rate": 1.4452830188679246e-05,
"loss": 0.023,
"step": 2500
},
{
"epoch": 2.7831392124237384,
"grad_norm": 0.30263999104499817,
"learning_rate": 1.4430632630410657e-05,
"loss": 0.0163,
"step": 2510
},
{
"epoch": 2.794231835829174,
"grad_norm": 0.7316049933433533,
"learning_rate": 1.4408435072142067e-05,
"loss": 0.0156,
"step": 2520
},
{
"epoch": 2.805324459234609,
"grad_norm": 0.8268038630485535,
"learning_rate": 1.4386237513873475e-05,
"loss": 0.0169,
"step": 2530
},
{
"epoch": 2.8164170826400445,
"grad_norm": 0.633816659450531,
"learning_rate": 1.4364039955604885e-05,
"loss": 0.0147,
"step": 2540
},
{
"epoch": 2.82750970604548,
"grad_norm": 0.43913397192955017,
"learning_rate": 1.4341842397336295e-05,
"loss": 0.0122,
"step": 2550
},
{
"epoch": 2.8386023294509153,
"grad_norm": 0.37594836950302124,
"learning_rate": 1.4319644839067703e-05,
"loss": 0.0149,
"step": 2560
},
{
"epoch": 2.8496949528563507,
"grad_norm": 0.5090307593345642,
"learning_rate": 1.4297447280799113e-05,
"loss": 0.015,
"step": 2570
},
{
"epoch": 2.860787576261786,
"grad_norm": 0.47723984718322754,
"learning_rate": 1.4275249722530521e-05,
"loss": 0.0166,
"step": 2580
},
{
"epoch": 2.8718801996672214,
"grad_norm": 1.2512151002883911,
"learning_rate": 1.4253052164261931e-05,
"loss": 0.0136,
"step": 2590
},
{
"epoch": 2.882972823072657,
"grad_norm": 0.676337718963623,
"learning_rate": 1.4230854605993343e-05,
"loss": 0.0152,
"step": 2600
},
{
"epoch": 2.894065446478092,
"grad_norm": 0.7379328012466431,
"learning_rate": 1.4208657047724752e-05,
"loss": 0.0182,
"step": 2610
},
{
"epoch": 2.9051580698835275,
"grad_norm": 0.8266171813011169,
"learning_rate": 1.418645948945616e-05,
"loss": 0.0207,
"step": 2620
},
{
"epoch": 2.916250693288963,
"grad_norm": 0.9925495982170105,
"learning_rate": 1.416426193118757e-05,
"loss": 0.0157,
"step": 2630
},
{
"epoch": 2.9273433166943983,
"grad_norm": 1.1120530366897583,
"learning_rate": 1.414206437291898e-05,
"loss": 0.0164,
"step": 2640
},
{
"epoch": 2.9384359400998337,
"grad_norm": 0.9687446355819702,
"learning_rate": 1.4119866814650389e-05,
"loss": 0.0211,
"step": 2650
},
{
"epoch": 2.949528563505269,
"grad_norm": 0.19811566174030304,
"learning_rate": 1.4097669256381798e-05,
"loss": 0.0181,
"step": 2660
},
{
"epoch": 2.9606211869107044,
"grad_norm": 0.9253482818603516,
"learning_rate": 1.4075471698113208e-05,
"loss": 0.0158,
"step": 2670
},
{
"epoch": 2.97171381031614,
"grad_norm": 0.677994430065155,
"learning_rate": 1.405327413984462e-05,
"loss": 0.024,
"step": 2680
},
{
"epoch": 2.982806433721575,
"grad_norm": 0.3959580361843109,
"learning_rate": 1.4031076581576028e-05,
"loss": 0.009,
"step": 2690
},
{
"epoch": 2.9938990571270105,
"grad_norm": 1.0569937229156494,
"learning_rate": 1.4008879023307438e-05,
"loss": 0.0187,
"step": 2700
},
{
"epoch": 3.0,
"eval_accuracy": 0.8888888888888888,
"eval_f1": 0.7171109200343938,
"eval_loss": 0.3638182580471039,
"eval_precision": 0.695,
"eval_recall": 0.7406749555950266,
"eval_runtime": 2.7252,
"eval_samples_per_second": 362.177,
"eval_steps_per_second": 11.375,
"step": 2706
},
{
"epoch": 3.004437049362174,
"grad_norm": 0.22475391626358032,
"learning_rate": 1.3986681465038848e-05,
"loss": 0.0127,
"step": 2710
},
{
"epoch": 3.0155296727676095,
"grad_norm": 0.42929062247276306,
"learning_rate": 1.3964483906770256e-05,
"loss": 0.0131,
"step": 2720
},
{
"epoch": 3.026622296173045,
"grad_norm": 2.398026466369629,
"learning_rate": 1.3942286348501666e-05,
"loss": 0.0072,
"step": 2730
},
{
"epoch": 3.0377149195784803,
"grad_norm": 0.14908376336097717,
"learning_rate": 1.3920088790233076e-05,
"loss": 0.0082,
"step": 2740
},
{
"epoch": 3.0488075429839157,
"grad_norm": 0.8177769184112549,
"learning_rate": 1.3897891231964484e-05,
"loss": 0.0088,
"step": 2750
},
{
"epoch": 3.059900166389351,
"grad_norm": 0.13737396895885468,
"learning_rate": 1.3875693673695894e-05,
"loss": 0.008,
"step": 2760
},
{
"epoch": 3.0709927897947864,
"grad_norm": 0.7825105786323547,
"learning_rate": 1.3853496115427305e-05,
"loss": 0.0034,
"step": 2770
},
{
"epoch": 3.082085413200222,
"grad_norm": 0.42062872648239136,
"learning_rate": 1.3831298557158715e-05,
"loss": 0.0062,
"step": 2780
},
{
"epoch": 3.093178036605657,
"grad_norm": 0.7224815487861633,
"learning_rate": 1.3809100998890123e-05,
"loss": 0.0055,
"step": 2790
},
{
"epoch": 3.1042706600110925,
"grad_norm": 0.04094177484512329,
"learning_rate": 1.3786903440621533e-05,
"loss": 0.002,
"step": 2800
},
{
"epoch": 3.115363283416528,
"grad_norm": 0.03310002386569977,
"learning_rate": 1.3764705882352943e-05,
"loss": 0.0055,
"step": 2810
},
{
"epoch": 3.1264559068219633,
"grad_norm": 0.5118809938430786,
"learning_rate": 1.3742508324084351e-05,
"loss": 0.0054,
"step": 2820
},
{
"epoch": 3.1375485302273987,
"grad_norm": 0.5759711861610413,
"learning_rate": 1.3720310765815761e-05,
"loss": 0.0086,
"step": 2830
},
{
"epoch": 3.148641153632834,
"grad_norm": 1.1310100555419922,
"learning_rate": 1.3698113207547171e-05,
"loss": 0.0091,
"step": 2840
},
{
"epoch": 3.1597337770382694,
"grad_norm": 0.04265379533171654,
"learning_rate": 1.3675915649278579e-05,
"loss": 0.0061,
"step": 2850
},
{
"epoch": 3.170826400443705,
"grad_norm": 0.6648514270782471,
"learning_rate": 1.365371809100999e-05,
"loss": 0.0035,
"step": 2860
},
{
"epoch": 3.18191902384914,
"grad_norm": 0.02486938051879406,
"learning_rate": 1.36315205327414e-05,
"loss": 0.0055,
"step": 2870
},
{
"epoch": 3.1930116472545755,
"grad_norm": 0.5384965538978577,
"learning_rate": 1.3609322974472809e-05,
"loss": 0.0029,
"step": 2880
},
{
"epoch": 3.204104270660011,
"grad_norm": 0.6231627464294434,
"learning_rate": 1.3587125416204219e-05,
"loss": 0.005,
"step": 2890
},
{
"epoch": 3.2151968940654463,
"grad_norm": 0.5190199613571167,
"learning_rate": 1.3564927857935628e-05,
"loss": 0.0083,
"step": 2900
},
{
"epoch": 3.2262895174708817,
"grad_norm": 0.39337947964668274,
"learning_rate": 1.3542730299667037e-05,
"loss": 0.0081,
"step": 2910
},
{
"epoch": 3.237382140876317,
"grad_norm": 0.6862775683403015,
"learning_rate": 1.3520532741398446e-05,
"loss": 0.0074,
"step": 2920
},
{
"epoch": 3.2484747642817524,
"grad_norm": 1.0862469673156738,
"learning_rate": 1.3498335183129856e-05,
"loss": 0.0074,
"step": 2930
},
{
"epoch": 3.259567387687188,
"grad_norm": 1.1846354007720947,
"learning_rate": 1.3476137624861268e-05,
"loss": 0.0075,
"step": 2940
},
{
"epoch": 3.270660011092623,
"grad_norm": 1.2481039762496948,
"learning_rate": 1.3453940066592676e-05,
"loss": 0.003,
"step": 2950
},
{
"epoch": 3.281752634498059,
"grad_norm": 0.3161996006965637,
"learning_rate": 1.3431742508324086e-05,
"loss": 0.0034,
"step": 2960
},
{
"epoch": 3.2928452579034944,
"grad_norm": 0.6262934803962708,
"learning_rate": 1.3409544950055496e-05,
"loss": 0.0107,
"step": 2970
},
{
"epoch": 3.3039378813089297,
"grad_norm": 0.9278308153152466,
"learning_rate": 1.3387347391786904e-05,
"loss": 0.0057,
"step": 2980
},
{
"epoch": 3.315030504714365,
"grad_norm": 0.40893518924713135,
"learning_rate": 1.3365149833518314e-05,
"loss": 0.0041,
"step": 2990
},
{
"epoch": 3.3261231281198005,
"grad_norm": 0.6295328736305237,
"learning_rate": 1.3342952275249724e-05,
"loss": 0.0032,
"step": 3000
},
{
"epoch": 3.337215751525236,
"grad_norm": 0.039073534309864044,
"learning_rate": 1.3320754716981132e-05,
"loss": 0.0067,
"step": 3010
},
{
"epoch": 3.3483083749306712,
"grad_norm": 0.5157305002212524,
"learning_rate": 1.3298557158712542e-05,
"loss": 0.0093,
"step": 3020
},
{
"epoch": 3.3594009983361066,
"grad_norm": 0.9451714158058167,
"learning_rate": 1.3276359600443953e-05,
"loss": 0.0064,
"step": 3030
},
{
"epoch": 3.370493621741542,
"grad_norm": 0.043243326246738434,
"learning_rate": 1.3254162042175363e-05,
"loss": 0.0062,
"step": 3040
},
{
"epoch": 3.3815862451469774,
"grad_norm": 1.2751350402832031,
"learning_rate": 1.3231964483906771e-05,
"loss": 0.011,
"step": 3050
},
{
"epoch": 3.3926788685524127,
"grad_norm": 1.1212610006332397,
"learning_rate": 1.3209766925638181e-05,
"loss": 0.008,
"step": 3060
},
{
"epoch": 3.403771491957848,
"grad_norm": 0.3985590934753418,
"learning_rate": 1.3187569367369591e-05,
"loss": 0.0066,
"step": 3070
},
{
"epoch": 3.4148641153632835,
"grad_norm": 0.6546558737754822,
"learning_rate": 1.3165371809101e-05,
"loss": 0.0115,
"step": 3080
},
{
"epoch": 3.425956738768719,
"grad_norm": 1.0568732023239136,
"learning_rate": 1.3143174250832409e-05,
"loss": 0.0104,
"step": 3090
},
{
"epoch": 3.4370493621741542,
"grad_norm": 0.9182056188583374,
"learning_rate": 1.3120976692563819e-05,
"loss": 0.0079,
"step": 3100
},
{
"epoch": 3.4481419855795896,
"grad_norm": 1.700160026550293,
"learning_rate": 1.3098779134295227e-05,
"loss": 0.0061,
"step": 3110
},
{
"epoch": 3.459234608985025,
"grad_norm": 0.09296636283397675,
"learning_rate": 1.3076581576026639e-05,
"loss": 0.0106,
"step": 3120
},
{
"epoch": 3.4703272323904604,
"grad_norm": 0.8721600770950317,
"learning_rate": 1.3054384017758049e-05,
"loss": 0.0058,
"step": 3130
},
{
"epoch": 3.4814198557958957,
"grad_norm": 0.5778414607048035,
"learning_rate": 1.3032186459489457e-05,
"loss": 0.0075,
"step": 3140
},
{
"epoch": 3.492512479201331,
"grad_norm": 0.632172167301178,
"learning_rate": 1.3009988901220867e-05,
"loss": 0.0057,
"step": 3150
},
{
"epoch": 3.5036051026067665,
"grad_norm": 0.49320322275161743,
"learning_rate": 1.2987791342952276e-05,
"loss": 0.0065,
"step": 3160
},
{
"epoch": 3.514697726012202,
"grad_norm": 1.2454118728637695,
"learning_rate": 1.2965593784683685e-05,
"loss": 0.0049,
"step": 3170
},
{
"epoch": 3.5257903494176372,
"grad_norm": 0.16420379281044006,
"learning_rate": 1.2943396226415095e-05,
"loss": 0.0042,
"step": 3180
},
{
"epoch": 3.5368829728230726,
"grad_norm": 0.7556213736534119,
"learning_rate": 1.2921198668146504e-05,
"loss": 0.0076,
"step": 3190
},
{
"epoch": 3.547975596228508,
"grad_norm": 0.7741811871528625,
"learning_rate": 1.2899001109877916e-05,
"loss": 0.0053,
"step": 3200
},
{
"epoch": 3.5590682196339434,
"grad_norm": 0.3981403708457947,
"learning_rate": 1.2876803551609324e-05,
"loss": 0.0111,
"step": 3210
},
{
"epoch": 3.5701608430393788,
"grad_norm": 1.556243658065796,
"learning_rate": 1.2854605993340734e-05,
"loss": 0.0082,
"step": 3220
},
{
"epoch": 3.581253466444814,
"grad_norm": 0.24557125568389893,
"learning_rate": 1.2832408435072144e-05,
"loss": 0.0048,
"step": 3230
},
{
"epoch": 3.5923460898502495,
"grad_norm": 1.2477893829345703,
"learning_rate": 1.2810210876803552e-05,
"loss": 0.0023,
"step": 3240
},
{
"epoch": 3.603438713255685,
"grad_norm": 0.04675845056772232,
"learning_rate": 1.2788013318534962e-05,
"loss": 0.0041,
"step": 3250
},
{
"epoch": 3.6145313366611203,
"grad_norm": 0.571173906326294,
"learning_rate": 1.2765815760266372e-05,
"loss": 0.0055,
"step": 3260
},
{
"epoch": 3.6256239600665556,
"grad_norm": 0.5565969944000244,
"learning_rate": 1.274361820199778e-05,
"loss": 0.0092,
"step": 3270
},
{
"epoch": 3.636716583471991,
"grad_norm": 0.013311430811882019,
"learning_rate": 1.272142064372919e-05,
"loss": 0.0071,
"step": 3280
},
{
"epoch": 3.6478092068774264,
"grad_norm": 1.6714210510253906,
"learning_rate": 1.2699223085460601e-05,
"loss": 0.0069,
"step": 3290
},
{
"epoch": 3.6589018302828618,
"grad_norm": 0.7213656306266785,
"learning_rate": 1.2677025527192011e-05,
"loss": 0.0053,
"step": 3300
},
{
"epoch": 3.669994453688297,
"grad_norm": 0.9073006510734558,
"learning_rate": 1.265482796892342e-05,
"loss": 0.0101,
"step": 3310
},
{
"epoch": 3.6810870770937325,
"grad_norm": 0.29895493388175964,
"learning_rate": 1.263263041065483e-05,
"loss": 0.0052,
"step": 3320
},
{
"epoch": 3.6921797004991683,
"grad_norm": 1.0842756032943726,
"learning_rate": 1.2610432852386239e-05,
"loss": 0.008,
"step": 3330
},
{
"epoch": 3.7032723239046037,
"grad_norm": 0.7172666192054749,
"learning_rate": 1.2588235294117647e-05,
"loss": 0.01,
"step": 3340
},
{
"epoch": 3.714364947310039,
"grad_norm": 0.10475369542837143,
"learning_rate": 1.2566037735849057e-05,
"loss": 0.0063,
"step": 3350
},
{
"epoch": 3.7254575707154745,
"grad_norm": 0.9639983177185059,
"learning_rate": 1.2543840177580467e-05,
"loss": 0.0059,
"step": 3360
},
{
"epoch": 3.73655019412091,
"grad_norm": 0.9579765796661377,
"learning_rate": 1.2521642619311875e-05,
"loss": 0.0053,
"step": 3370
},
{
"epoch": 3.747642817526345,
"grad_norm": 0.14046674966812134,
"learning_rate": 1.2499445061043287e-05,
"loss": 0.0081,
"step": 3380
},
{
"epoch": 3.7587354409317806,
"grad_norm": 0.4136715233325958,
"learning_rate": 1.2477247502774697e-05,
"loss": 0.0101,
"step": 3390
},
{
"epoch": 3.769828064337216,
"grad_norm": 0.30935949087142944,
"learning_rate": 1.2455049944506107e-05,
"loss": 0.0046,
"step": 3400
},
{
"epoch": 3.7809206877426513,
"grad_norm": 0.25942277908325195,
"learning_rate": 1.2432852386237515e-05,
"loss": 0.006,
"step": 3410
},
{
"epoch": 3.7920133111480867,
"grad_norm": 0.08538083732128143,
"learning_rate": 1.2410654827968925e-05,
"loss": 0.0038,
"step": 3420
},
{
"epoch": 3.803105934553522,
"grad_norm": 0.11854628473520279,
"learning_rate": 1.2388457269700334e-05,
"loss": 0.0038,
"step": 3430
},
{
"epoch": 3.8141985579589575,
"grad_norm": 0.2808975279331207,
"learning_rate": 1.2366259711431743e-05,
"loss": 0.0093,
"step": 3440
},
{
"epoch": 3.825291181364393,
"grad_norm": 1.0285228490829468,
"learning_rate": 1.2344062153163152e-05,
"loss": 0.0101,
"step": 3450
},
{
"epoch": 3.836383804769828,
"grad_norm": 0.43529564142227173,
"learning_rate": 1.2321864594894564e-05,
"loss": 0.0057,
"step": 3460
},
{
"epoch": 3.8474764281752636,
"grad_norm": 2.0706746578216553,
"learning_rate": 1.2299667036625972e-05,
"loss": 0.0079,
"step": 3470
},
{
"epoch": 3.858569051580699,
"grad_norm": 0.9805829524993896,
"learning_rate": 1.2277469478357382e-05,
"loss": 0.0116,
"step": 3480
},
{
"epoch": 3.8696616749861343,
"grad_norm": 0.754189133644104,
"learning_rate": 1.2255271920088792e-05,
"loss": 0.0056,
"step": 3490
},
{
"epoch": 3.8807542983915697,
"grad_norm": 0.9179038405418396,
"learning_rate": 1.22330743618202e-05,
"loss": 0.0079,
"step": 3500
},
{
"epoch": 3.891846921797005,
"grad_norm": 1.5467790365219116,
"learning_rate": 1.221087680355161e-05,
"loss": 0.009,
"step": 3510
},
{
"epoch": 3.9029395452024405,
"grad_norm": 0.44232311844825745,
"learning_rate": 1.218867924528302e-05,
"loss": 0.0072,
"step": 3520
},
{
"epoch": 3.914032168607876,
"grad_norm": 0.05650203302502632,
"learning_rate": 1.2166481687014428e-05,
"loss": 0.0053,
"step": 3530
},
{
"epoch": 3.925124792013311,
"grad_norm": 0.6348217129707336,
"learning_rate": 1.2144284128745838e-05,
"loss": 0.009,
"step": 3540
},
{
"epoch": 3.9362174154187466,
"grad_norm": 1.0749729871749878,
"learning_rate": 1.212208657047725e-05,
"loss": 0.0086,
"step": 3550
},
{
"epoch": 3.947310038824182,
"grad_norm": 2.320844888687134,
"learning_rate": 1.209988901220866e-05,
"loss": 0.0096,
"step": 3560
},
{
"epoch": 3.9584026622296173,
"grad_norm": 0.5694814324378967,
"learning_rate": 1.2077691453940067e-05,
"loss": 0.008,
"step": 3570
},
{
"epoch": 3.9694952856350527,
"grad_norm": 0.9794883728027344,
"learning_rate": 1.2055493895671477e-05,
"loss": 0.0045,
"step": 3580
},
{
"epoch": 3.980587909040488,
"grad_norm": 0.2133321315050125,
"learning_rate": 1.2033296337402887e-05,
"loss": 0.0054,
"step": 3590
},
{
"epoch": 3.9916805324459235,
"grad_norm": 1.423915147781372,
"learning_rate": 1.2011098779134295e-05,
"loss": 0.0127,
"step": 3600
},
{
"epoch": 4.0,
"eval_accuracy": 0.8699763593380615,
"eval_f1": 0.6728971962616822,
"eval_loss": 0.4916483163833618,
"eval_precision": 0.6449511400651465,
"eval_recall": 0.7033747779751333,
"eval_runtime": 2.7388,
"eval_samples_per_second": 360.381,
"eval_steps_per_second": 11.319,
"step": 3608
}
],
"logging_steps": 10,
"max_steps": 9010,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.844979161024907e+16,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}