{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9657142857142857,
  "eval_steps": 500,
  "global_step": 86,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 6.666666666666667e-06,
      "loss": 3.0025,
      "step": 1
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 3.3344,
      "step": 2
    },
    {
      "epoch": 0.07,
      "learning_rate": 2e-05,
      "loss": 2.8488,
      "step": 3
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9992837548163315e-05,
      "loss": 2.6912,
      "step": 4
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9971360452796523e-05,
      "loss": 2.6867,
      "step": 5
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.993559947963185e-05,
      "loss": 2.5857,
      "step": 6
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9885605855918887e-05,
      "loss": 2.6284,
      "step": 7
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9821451197042028e-05,
      "loss": 2.5126,
      "step": 8
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9743227403932135e-05,
      "loss": 2.2892,
      "step": 9
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9651046531419335e-05,
      "loss": 2.3933,
      "step": 10
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9545040627715554e-05,
      "loss": 2.2902,
      "step": 11
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.942536154525673e-05,
      "loss": 2.256,
      "step": 12
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9292180723175656e-05,
      "loss": 2.2393,
      "step": 13
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9145688941717074e-05,
      "loss": 2.1966,
      "step": 14
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8986096048946826e-05,
      "loss": 2.1219,
      "step": 15
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.881363066014649e-05,
      "loss": 1.9816,
      "step": 16
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.862853983032423e-05,
      "loss": 2.1136,
      "step": 17
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8431088700310846e-05,
      "loss": 2.0719,
      "step": 18
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8221560116948103e-05,
      "loss": 2.009,
      "step": 19
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8000254227913346e-05,
      "loss": 1.9665,
      "step": 20
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7767488051760858e-05,
      "loss": 1.9006,
      "step": 21
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7523595023795814e-05,
      "loss": 1.9061,
      "step": 22
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7268924518431437e-05,
      "loss": 1.8054,
      "step": 23
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.700384134871351e-05,
      "loss": 1.9808,
      "step": 24
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.672872524372919e-05,
      "loss": 1.9158,
      "step": 25
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.644397030464877e-05,
      "loss": 1.8382,
      "step": 26
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.614998444017954e-05,
      "loss": 1.8274,
      "step": 27
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.5847188782240473e-05,
      "loss": 1.745,
      "step": 28
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.5536017082694846e-05,
      "loss": 1.7769,
      "step": 29
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.5216915092004847e-05,
      "loss": 1.9155,
      "step": 30
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.4890339920698334e-05,
      "loss": 1.7167,
      "step": 31
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4556759384562418e-05,
      "loss": 1.8109,
      "step": 32
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.421665133450184e-05,
      "loss": 1.6917,
      "step": 33
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.3870502972022175e-05,
      "loss": 1.6803,
      "step": 34
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.351881015131833e-05,
      "loss": 1.6432,
      "step": 35
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.316207666896824e-05,
      "loss": 1.71,
      "step": 36
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.2800813542249073e-05,
      "loss": 1.732,
      "step": 37
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.2435538277109919e-05,
      "loss": 1.7514,
      "step": 38
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.206677412684953e-05,
      "loss": 1.7247,
      "step": 39
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.1695049342560969e-05,
      "loss": 1.6718,
      "step": 40
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.1320896416417026e-05,
      "loss": 1.7548,
      "step": 41
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.0944851318880314e-05,
      "loss": 1.7024,
      "step": 42
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.0567452730930743e-05,
      "loss": 1.56,
      "step": 43
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.0189241272410191e-05,
      "loss": 1.6043,
      "step": 44
    },
    {
      "epoch": 1.03,
      "learning_rate": 9.810758727589814e-06,
      "loss": 1.69,
      "step": 45
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.43254726906926e-06,
      "loss": 1.5612,
      "step": 46
    },
    {
      "epoch": 1.07,
      "learning_rate": 9.055148681119688e-06,
      "loss": 1.6349,
      "step": 47
    },
    {
      "epoch": 1.1,
      "learning_rate": 8.67910358358298e-06,
      "loss": 1.6196,
      "step": 48
    },
    {
      "epoch": 1.12,
      "learning_rate": 8.304950657439034e-06,
      "loss": 1.6262,
      "step": 49
    },
    {
      "epoch": 1.14,
      "learning_rate": 7.93322587315047e-06,
      "loss": 1.5572,
      "step": 50
    },
    {
      "epoch": 1.17,
      "learning_rate": 7.564461722890082e-06,
      "loss": 1.6005,
      "step": 51
    },
    {
      "epoch": 1.19,
      "learning_rate": 7.199186457750931e-06,
      "loss": 1.3992,
      "step": 52
    },
    {
      "epoch": 1.21,
      "learning_rate": 6.837923331031761e-06,
      "loss": 1.6098,
      "step": 53
    },
    {
      "epoch": 1.23,
      "learning_rate": 6.48118984868167e-06,
      "loss": 1.5493,
      "step": 54
    },
    {
      "epoch": 1.26,
      "learning_rate": 6.129497027977829e-06,
      "loss": 1.5581,
      "step": 55
    },
    {
      "epoch": 1.28,
      "learning_rate": 5.78334866549816e-06,
      "loss": 1.5138,
      "step": 56
    },
    {
      "epoch": 1.3,
      "learning_rate": 5.443240615437586e-06,
      "loss": 1.6091,
      "step": 57
    },
    {
      "epoch": 1.33,
      "learning_rate": 5.109660079301668e-06,
      "loss": 1.5832,
      "step": 58
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.783084907995156e-06,
      "loss": 1.531,
      "step": 59
    },
    {
      "epoch": 1.37,
      "learning_rate": 4.463982917305155e-06,
      "loss": 1.4781,
      "step": 60
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.152811217759529e-06,
      "loss": 1.5776,
      "step": 61
    },
    {
      "epoch": 1.42,
      "learning_rate": 3.850015559820465e-06,
      "loss": 1.617,
      "step": 62
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.5560296953512296e-06,
      "loss": 1.4925,
      "step": 63
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.2712747562708115e-06,
      "loss": 1.538,
      "step": 64
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.9961586512864947e-06,
      "loss": 1.5641,
      "step": 65
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.7310754815685627e-06,
      "loss": 1.4769,
      "step": 66
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.4764049762041874e-06,
      "loss": 1.5378,
      "step": 67
    },
    {
      "epoch": 1.55,
      "learning_rate": 2.2325119482391466e-06,
      "loss": 1.4627,
      "step": 68
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.9997457720866554e-06,
      "loss": 1.5337,
      "step": 69
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.7784398830519002e-06,
      "loss": 1.5069,
      "step": 70
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.5689112996891576e-06,
      "loss": 1.467,
      "step": 71
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.3714601696757713e-06,
      "loss": 1.5326,
      "step": 72
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.1863693398535115e-06,
      "loss": 1.5425,
      "step": 73
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.01390395105318e-06,
      "loss": 1.5971,
      "step": 74
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.543110582829272e-07,
      "loss": 1.5567,
      "step": 75
    },
    {
      "epoch": 1.74,
      "learning_rate": 7.078192768243486e-07,
      "loss": 1.4339,
      "step": 76
    },
    {
      "epoch": 1.76,
      "learning_rate": 5.746384547432738e-07,
      "loss": 1.4543,
      "step": 77
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.549593722844492e-07,
      "loss": 1.4892,
      "step": 78
    },
    {
      "epoch": 1.81,
      "learning_rate": 3.4895346858066723e-07,
      "loss": 1.4933,
      "step": 79
    },
    {
      "epoch": 1.83,
      "learning_rate": 2.5677259606786686e-07,
      "loss": 1.5602,
      "step": 80
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.7854880295797406e-07,
      "loss": 1.5017,
      "step": 81
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.1439414408111471e-07,
      "loss": 1.5328,
      "step": 82
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.440052036815081e-08,
      "loss": 1.5076,
      "step": 83
    },
    {
      "epoch": 1.92,
      "learning_rate": 2.86395472034795e-08,
      "loss": 1.4469,
      "step": 84
    },
    {
      "epoch": 1.94,
      "learning_rate": 7.162451836685291e-09,
      "loss": 1.4975,
      "step": 85
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.0,
      "loss": 1.4956,
      "step": 86
    },
    {
      "epoch": 1.97,
      "step": 86,
      "total_flos": 1.864529779831603e+16,
      "train_loss": 1.8037537461103395,
      "train_runtime": 866.9782,
      "train_samples_per_second": 12.918,
      "train_steps_per_second": 0.099
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 86,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "total_flos": 1.864529779831603e+16,
  "trial_name": null,
  "trial_params": null
}