{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.006064166461369365,
  "eval_steps": 100,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.5160416153423412e-05,
      "eval_loss": 2.7578186988830566,
      "eval_runtime": 2130.3996,
      "eval_samples_per_second": 13.037,
      "eval_steps_per_second": 6.518,
      "step": 1
    },
    {
      "epoch": 7.580208076711705e-05,
      "grad_norm": 0.22757090628147125,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.6285,
      "step": 5
    },
    {
      "epoch": 0.0001516041615342341,
      "grad_norm": 0.30123084783554077,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.6439,
      "step": 10
    },
    {
      "epoch": 0.00022740624230135117,
      "grad_norm": 0.43347233533859253,
      "learning_rate": 5e-05,
      "loss": 2.6158,
      "step": 15
    },
    {
      "epoch": 0.0003032083230684682,
      "grad_norm": 0.38997018337249756,
      "learning_rate": 6.666666666666667e-05,
      "loss": 2.6351,
      "step": 20
    },
    {
      "epoch": 0.0003790104038355853,
      "grad_norm": 0.45476818084716797,
      "learning_rate": 8.333333333333334e-05,
      "loss": 2.6593,
      "step": 25
    },
    {
      "epoch": 0.00045481248460270234,
      "grad_norm": 0.44464364647865295,
      "learning_rate": 0.0001,
      "loss": 2.5722,
      "step": 30
    },
    {
      "epoch": 0.0005306145653698194,
      "grad_norm": 0.40289220213890076,
      "learning_rate": 9.995494831023409e-05,
      "loss": 2.5307,
      "step": 35
    },
    {
      "epoch": 0.0006064166461369364,
      "grad_norm": 0.47276997566223145,
      "learning_rate": 9.981987442712633e-05,
      "loss": 2.5108,
      "step": 40
    },
    {
      "epoch": 0.0006822187269040535,
      "grad_norm": 0.5267543792724609,
      "learning_rate": 9.959502176294383e-05,
      "loss": 2.5934,
      "step": 45
    },
    {
      "epoch": 0.0007580208076711706,
      "grad_norm": 0.7315161228179932,
      "learning_rate": 9.928079551738543e-05,
      "loss": 2.4946,
      "step": 50
    },
    {
      "epoch": 0.0008338228884382876,
      "grad_norm": 0.36576610803604126,
      "learning_rate": 9.887776194738432e-05,
      "loss": 2.3427,
      "step": 55
    },
    {
      "epoch": 0.0009096249692054047,
      "grad_norm": 0.33563587069511414,
      "learning_rate": 9.838664734667495e-05,
      "loss": 2.4108,
      "step": 60
    },
    {
      "epoch": 0.0009854270499725217,
      "grad_norm": 0.3877538740634918,
      "learning_rate": 9.780833673696254e-05,
      "loss": 2.4436,
      "step": 65
    },
    {
      "epoch": 0.0010612291307396389,
      "grad_norm": 0.40572142601013184,
      "learning_rate": 9.714387227305422e-05,
      "loss": 2.3672,
      "step": 70
    },
    {
      "epoch": 0.0011370312115067558,
      "grad_norm": 0.3760512173175812,
      "learning_rate": 9.639445136482548e-05,
      "loss": 2.3948,
      "step": 75
    },
    {
      "epoch": 0.0012128332922738728,
      "grad_norm": 0.4467146098613739,
      "learning_rate": 9.55614245194068e-05,
      "loss": 2.4006,
      "step": 80
    },
    {
      "epoch": 0.00128863537304099,
      "grad_norm": 0.4707958996295929,
      "learning_rate": 9.464629290747842e-05,
      "loss": 2.4273,
      "step": 85
    },
    {
      "epoch": 0.001364437453808107,
      "grad_norm": 0.6314570903778076,
      "learning_rate": 9.365070565805941e-05,
      "loss": 2.3876,
      "step": 90
    },
    {
      "epoch": 0.001440239534575224,
      "grad_norm": 0.6339156627655029,
      "learning_rate": 9.257645688666556e-05,
      "loss": 2.3634,
      "step": 95
    },
    {
      "epoch": 0.0015160416153423412,
      "grad_norm": 0.7461808919906616,
      "learning_rate": 9.142548246219212e-05,
      "loss": 2.4329,
      "step": 100
    },
    {
      "epoch": 0.0015160416153423412,
      "eval_loss": 2.39953351020813,
      "eval_runtime": 2140.4428,
      "eval_samples_per_second": 12.976,
      "eval_steps_per_second": 6.488,
      "step": 100
    },
    {
      "epoch": 0.0015918436961094582,
      "grad_norm": 0.4278106987476349,
      "learning_rate": 9.019985651834703e-05,
      "loss": 2.3485,
      "step": 105
    },
    {
      "epoch": 0.0016676457768765752,
      "grad_norm": 0.38612955808639526,
      "learning_rate": 8.890178771592199e-05,
      "loss": 2.3566,
      "step": 110
    },
    {
      "epoch": 0.0017434478576436924,
      "grad_norm": 0.39941906929016113,
      "learning_rate": 8.753361526263621e-05,
      "loss": 2.3124,
      "step": 115
    },
    {
      "epoch": 0.0018192499384108093,
      "grad_norm": 0.43250414729118347,
      "learning_rate": 8.609780469772623e-05,
      "loss": 2.3929,
      "step": 120
    },
    {
      "epoch": 0.0018950520191779265,
      "grad_norm": 0.5353224277496338,
      "learning_rate": 8.459694344887732e-05,
      "loss": 2.277,
      "step": 125
    },
    {
      "epoch": 0.0019708540999450433,
      "grad_norm": 0.492675244808197,
      "learning_rate": 8.303373616950408e-05,
      "loss": 2.3773,
      "step": 130
    },
    {
      "epoch": 0.0020466561807121605,
      "grad_norm": 0.5275689959526062,
      "learning_rate": 8.141099986478212e-05,
      "loss": 2.4119,
      "step": 135
    },
    {
      "epoch": 0.0021224582614792777,
      "grad_norm": 0.6010898947715759,
      "learning_rate": 7.973165881521434e-05,
      "loss": 2.5141,
      "step": 140
    },
    {
      "epoch": 0.0021982603422463945,
      "grad_norm": 0.8218650221824646,
      "learning_rate": 7.799873930687978e-05,
      "loss": 2.3623,
      "step": 145
    },
    {
      "epoch": 0.0022740624230135117,
      "grad_norm": 0.8801436424255371,
      "learning_rate": 7.621536417786159e-05,
      "loss": 2.4681,
      "step": 150
    },
    {
      "epoch": 0.002349864503780629,
      "grad_norm": 0.41547921299934387,
      "learning_rate": 7.438474719068173e-05,
      "loss": 2.2384,
      "step": 155
    },
    {
      "epoch": 0.0024256665845477456,
      "grad_norm": 0.4640543460845947,
      "learning_rate": 7.251018724088367e-05,
      "loss": 2.3115,
      "step": 160
    },
    {
      "epoch": 0.002501468665314863,
      "grad_norm": 0.48376739025115967,
      "learning_rate": 7.059506241219965e-05,
      "loss": 2.3587,
      "step": 165
    },
    {
      "epoch": 0.00257727074608198,
      "grad_norm": 0.48906421661376953,
      "learning_rate": 6.864282388901544e-05,
      "loss": 2.3416,
      "step": 170
    },
    {
      "epoch": 0.002653072826849097,
      "grad_norm": 0.493834525346756,
      "learning_rate": 6.665698973710288e-05,
      "loss": 2.2795,
      "step": 175
    },
    {
      "epoch": 0.002728874907616214,
      "grad_norm": 0.5276143550872803,
      "learning_rate": 6.464113856382752e-05,
      "loss": 2.363,
      "step": 180
    },
    {
      "epoch": 0.002804676988383331,
      "grad_norm": 0.5667726993560791,
      "learning_rate": 6.259890306925627e-05,
      "loss": 2.3158,
      "step": 185
    },
    {
      "epoch": 0.002880479069150448,
      "grad_norm": 0.5737515091896057,
      "learning_rate": 6.0533963499786314e-05,
      "loss": 2.4867,
      "step": 190
    },
    {
      "epoch": 0.002956281149917565,
      "grad_norm": 0.6294056177139282,
      "learning_rate": 5.8450041016092464e-05,
      "loss": 2.3669,
      "step": 195
    },
    {
      "epoch": 0.0030320832306846824,
      "grad_norm": 0.8288158178329468,
      "learning_rate": 5.6350890987343944e-05,
      "loss": 2.2939,
      "step": 200
    },
    {
      "epoch": 0.0030320832306846824,
      "eval_loss": 2.360253095626831,
      "eval_runtime": 2139.6586,
      "eval_samples_per_second": 12.981,
      "eval_steps_per_second": 6.49,
      "step": 200
    },
    {
      "epoch": 0.003107885311451799,
      "grad_norm": 0.4714564383029938,
      "learning_rate": 5.4240296223775465e-05,
      "loss": 2.2905,
      "step": 205
    },
    {
      "epoch": 0.0031836873922189163,
      "grad_norm": 0.4659261703491211,
      "learning_rate": 5.212206015980742e-05,
      "loss": 2.3667,
      "step": 210
    },
    {
      "epoch": 0.0032594894729860335,
      "grad_norm": 0.465675950050354,
      "learning_rate": 5e-05,
      "loss": 2.304,
      "step": 215
    },
    {
      "epoch": 0.0033352915537531503,
      "grad_norm": 0.5581633448600769,
      "learning_rate": 4.78779398401926e-05,
      "loss": 2.3988,
      "step": 220
    },
    {
      "epoch": 0.0034110936345202675,
      "grad_norm": 0.5281254649162292,
      "learning_rate": 4.575970377622456e-05,
      "loss": 2.3941,
      "step": 225
    },
    {
      "epoch": 0.0034868957152873847,
      "grad_norm": 0.615689218044281,
      "learning_rate": 4.364910901265606e-05,
      "loss": 2.3505,
      "step": 230
    },
    {
      "epoch": 0.0035626977960545015,
      "grad_norm": 0.6545027494430542,
      "learning_rate": 4.1549958983907555e-05,
      "loss": 2.4235,
      "step": 235
    },
    {
      "epoch": 0.0036384998768216187,
      "grad_norm": 0.6870514154434204,
      "learning_rate": 3.94660365002137e-05,
      "loss": 2.305,
      "step": 240
    },
    {
      "epoch": 0.003714301957588736,
      "grad_norm": 0.8205248117446899,
      "learning_rate": 3.740109693074375e-05,
      "loss": 2.4011,
      "step": 245
    },
    {
      "epoch": 0.003790104038355853,
      "grad_norm": 0.9560214281082153,
      "learning_rate": 3.5358861436172485e-05,
      "loss": 2.5063,
      "step": 250
    },
    {
      "epoch": 0.00386590611912297,
      "grad_norm": 0.503361165523529,
      "learning_rate": 3.334301026289712e-05,
      "loss": 2.2964,
      "step": 255
    },
    {
      "epoch": 0.003941708199890087,
      "grad_norm": 0.4707900285720825,
      "learning_rate": 3.135717611098458e-05,
      "loss": 2.2691,
      "step": 260
    },
    {
      "epoch": 0.004017510280657204,
      "grad_norm": 0.5029754638671875,
      "learning_rate": 2.9404937587800375e-05,
      "loss": 2.3365,
      "step": 265
    },
    {
      "epoch": 0.004093312361424321,
      "grad_norm": 0.5085511803627014,
      "learning_rate": 2.748981275911633e-05,
      "loss": 2.2815,
      "step": 270
    },
    {
      "epoch": 0.004169114442191438,
      "grad_norm": 0.6214500069618225,
      "learning_rate": 2.5615252809318284e-05,
      "loss": 2.3879,
      "step": 275
    },
    {
      "epoch": 0.004244916522958555,
      "grad_norm": 0.6104791164398193,
      "learning_rate": 2.3784635822138424e-05,
      "loss": 2.3606,
      "step": 280
    },
    {
      "epoch": 0.004320718603725672,
      "grad_norm": 0.6343466639518738,
      "learning_rate": 2.2001260693120233e-05,
      "loss": 2.3203,
      "step": 285
    },
    {
      "epoch": 0.004396520684492789,
      "grad_norm": 0.6237061023712158,
      "learning_rate": 2.026834118478567e-05,
      "loss": 2.2779,
      "step": 290
    },
    {
      "epoch": 0.004472322765259907,
      "grad_norm": 0.7434378266334534,
      "learning_rate": 1.858900013521788e-05,
      "loss": 2.3089,
      "step": 295
    },
    {
      "epoch": 0.004548124846027023,
      "grad_norm": 0.9040917754173279,
      "learning_rate": 1.6966263830495936e-05,
      "loss": 2.4521,
      "step": 300
    },
    {
      "epoch": 0.004548124846027023,
      "eval_loss": 2.3442423343658447,
      "eval_runtime": 2138.8652,
      "eval_samples_per_second": 12.985,
      "eval_steps_per_second": 6.493,
      "step": 300
    },
    {
      "epoch": 0.00462392692679414,
      "grad_norm": 0.4680471122264862,
      "learning_rate": 1.5403056551122697e-05,
      "loss": 2.3251,
      "step": 305
    },
    {
      "epoch": 0.004699729007561258,
      "grad_norm": 0.5053917169570923,
      "learning_rate": 1.3902195302273779e-05,
      "loss": 2.383,
      "step": 310
    },
    {
      "epoch": 0.0047755310883283745,
      "grad_norm": 0.5932111144065857,
      "learning_rate": 1.246638473736378e-05,
      "loss": 2.3376,
      "step": 315
    },
    {
      "epoch": 0.004851333169095491,
      "grad_norm": 0.5413691997528076,
      "learning_rate": 1.1098212284078036e-05,
      "loss": 2.3513,
      "step": 320
    },
    {
      "epoch": 0.004927135249862609,
      "grad_norm": 0.726198673248291,
      "learning_rate": 9.800143481652979e-06,
      "loss": 2.4017,
      "step": 325
    },
    {
      "epoch": 0.005002937330629726,
      "grad_norm": 0.5952734351158142,
      "learning_rate": 8.574517537807897e-06,
      "loss": 2.2666,
      "step": 330
    },
    {
      "epoch": 0.0050787394113968425,
      "grad_norm": 0.6325485706329346,
      "learning_rate": 7.423543113334436e-06,
      "loss": 2.2846,
      "step": 335
    },
    {
      "epoch": 0.00515454149216396,
      "grad_norm": 0.6884042024612427,
      "learning_rate": 6.349294341940593e-06,
      "loss": 2.3291,
      "step": 340
    },
    {
      "epoch": 0.005230343572931077,
      "grad_norm": 0.7182841897010803,
      "learning_rate": 5.353707092521582e-06,
      "loss": 2.3911,
      "step": 345
    },
    {
      "epoch": 0.005306145653698194,
      "grad_norm": 1.0043755769729614,
      "learning_rate": 4.43857548059321e-06,
      "loss": 2.3998,
      "step": 350
    },
    {
      "epoch": 0.005381947734465311,
      "grad_norm": 0.4532211720943451,
      "learning_rate": 3.605548635174533e-06,
      "loss": 2.26,
      "step": 355
    },
    {
      "epoch": 0.005457749815232428,
      "grad_norm": 0.5419708490371704,
      "learning_rate": 2.85612772694579e-06,
      "loss": 2.3052,
      "step": 360
    },
    {
      "epoch": 0.005533551895999545,
      "grad_norm": 0.5240336656570435,
      "learning_rate": 2.191663263037458e-06,
      "loss": 2.3272,
      "step": 365
    },
    {
      "epoch": 0.005609353976766662,
      "grad_norm": 0.5560078024864197,
      "learning_rate": 1.6133526533250565e-06,
      "loss": 2.292,
      "step": 370
    },
    {
      "epoch": 0.005685156057533779,
      "grad_norm": 0.5708209872245789,
      "learning_rate": 1.1222380526156928e-06,
      "loss": 2.3481,
      "step": 375
    },
    {
      "epoch": 0.005760958138300896,
      "grad_norm": 0.6118387579917908,
      "learning_rate": 7.192044826145771e-07,
      "loss": 2.316,
      "step": 380
    },
    {
      "epoch": 0.005836760219068014,
      "grad_norm": 0.5928770899772644,
      "learning_rate": 4.049782370561583e-07,
      "loss": 2.3097,
      "step": 385
    },
    {
      "epoch": 0.00591256229983513,
      "grad_norm": 0.6476154923439026,
      "learning_rate": 1.8012557287367392e-07,
      "loss": 2.3382,
      "step": 390
    },
    {
      "epoch": 0.005988364380602247,
      "grad_norm": 0.7122578620910645,
      "learning_rate": 4.5051689765929214e-08,
      "loss": 2.3883,
      "step": 395
    },
    {
      "epoch": 0.006064166461369365,
      "grad_norm": 1.039080262184143,
      "learning_rate": 0.0,
      "loss": 2.384,
      "step": 400
    },
    {
      "epoch": 0.006064166461369365,
      "eval_loss": 2.341160297393799,
      "eval_runtime": 2139.1179,
      "eval_samples_per_second": 12.984,
      "eval_steps_per_second": 6.492,
      "step": 400
    }
  ],
  "logging_steps": 5,
  "max_steps": 400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.302999393632256e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}