{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.955918367346939,
  "eval_steps": 50.0,
  "global_step": 114,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.8797,
      "step": 1
    },
    {
      "epoch": 0.1,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.0534,
      "step": 2
    },
    {
      "epoch": 0.16,
      "learning_rate": 5e-06,
      "loss": 0.8716,
      "step": 3
    },
    {
      "epoch": 0.21,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.7946,
      "step": 4
    },
    {
      "epoch": 0.26,
      "learning_rate": 8.333333333333334e-06,
      "loss": 3.2005,
      "step": 5
    },
    {
      "epoch": 0.31,
      "learning_rate": 1e-05,
      "loss": 0.8402,
      "step": 6
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.1666666666666668e-05,
      "loss": 1.1973,
      "step": 7
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.2269,
      "step": 8
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 1.1162,
      "step": 9
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.1691,
      "step": 10
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8333333333333333e-05,
      "loss": 1.3683,
      "step": 11
    },
    {
      "epoch": 0.63,
      "learning_rate": 2e-05,
      "loss": 1.6888,
      "step": 12
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.999525719713366e-05,
      "loss": 1.6799,
      "step": 13
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.9981033287370443e-05,
      "loss": 1.9929,
      "step": 14
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.9957341762950346e-05,
      "loss": 3.0116,
      "step": 15
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.992420509671936e-05,
      "loss": 0.7142,
      "step": 16
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.9881654720812594e-05,
      "loss": 0.9264,
      "step": 17
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.982973099683902e-05,
      "loss": 1.5301,
      "step": 18
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.9768483177596008e-05,
      "loss": 2.8814,
      "step": 19
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.9697969360350098e-05,
      "loss": 2.4251,
      "step": 20
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.961825643172819e-05,
      "loss": 1.1431,
      "step": 21
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.9529420004271568e-05,
      "loss": 0.9725,
      "step": 22
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.9431544344712776e-05,
      "loss": 1.7915,
      "step": 23
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.932472229404356e-05,
      "loss": 2.5068,
      "step": 24
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.920905517944954e-05,
      "loss": 0.9562,
      "step": 25
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.9084652718195237e-05,
      "loss": 1.2789,
      "step": 26
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.8951632913550625e-05,
      "loss": 1.4285,
      "step": 27
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.8810121942857848e-05,
      "loss": 0.9021,
      "step": 28
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.866025403784439e-05,
      "loss": 1.2169,
      "step": 29
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.8502171357296144e-05,
      "loss": 1.0218,
      "step": 30
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.8336023852211197e-05,
      "loss": 1.1585,
      "step": 31
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.816196912356222e-05,
      "loss": 2.1427,
      "step": 32
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.7980172272802398e-05,
      "loss": 0.9307,
      "step": 33
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.7790805745256703e-05,
      "loss": 3.4475,
      "step": 34
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.7594049166547073e-05,
      "loss": 0.6663,
      "step": 35
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.7390089172206594e-05,
      "loss": 0.876,
      "step": 36
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.717911923064442e-05,
      "loss": 1.342,
      "step": 37
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.696133945962927e-05,
      "loss": 2.3712,
      "step": 38
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.6736956436465573e-05,
      "loss": 2.8832,
      "step": 39
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.650618300204242e-05,
      "loss": 0.7124,
      "step": 40
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.626923805894107e-05,
      "loss": 0.7767,
      "step": 41
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.6026346363792565e-05,
      "loss": 1.332,
      "step": 42
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.5777738314082514e-05,
      "loss": 2.0321,
      "step": 43
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.552364972960506e-05,
      "loss": 0.7454,
      "step": 44
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.526432162877356e-05,
      "loss": 0.9645,
      "step": 45
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.854,
      "step": 46
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.4730935568360103e-05,
      "loss": 0.5824,
      "step": 47
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.4457383557765385e-05,
      "loss": 0.9988,
      "step": 48
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.4179603448867836e-05,
      "loss": 0.4429,
      "step": 49
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.3897858732926794e-05,
      "loss": 1.0813,
      "step": 50
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.3612416661871532e-05,
      "loss": 1.8012,
      "step": 51
    },
    {
      "epoch": 2.72,
      "learning_rate": 1.3323547994796597e-05,
      "loss": 0.4862,
      "step": 52
    },
    {
      "epoch": 2.77,
      "learning_rate": 1.3031526741130435e-05,
      "loss": 1.9309,
      "step": 53
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.2736629900720832e-05,
      "loss": 0.5195,
      "step": 54
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.2439137201083772e-05,
      "loss": 0.8653,
      "step": 55
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.2139330832064975e-05,
      "loss": 0.8961,
      "step": 56
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.1837495178165706e-05,
      "loss": 2.9187,
      "step": 57
    },
    {
      "epoch": 3.03,
      "learning_rate": 1.1533916548786856e-05,
      "loss": 1.9772,
      "step": 58
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.1228882906647142e-05,
      "loss": 0.4849,
      "step": 59
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.092268359463302e-05,
      "loss": 0.5423,
      "step": 60
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.0615609061339431e-05,
      "loss": 0.5995,
      "step": 61
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.0307950585561705e-05,
      "loss": 1.1515,
      "step": 62
    },
    {
      "epoch": 3.29,
      "learning_rate": 1e-05,
      "loss": 0.6377,
      "step": 63
    },
    {
      "epoch": 3.34,
      "learning_rate": 9.692049414438298e-06,
      "loss": 0.5577,
      "step": 64
    },
    {
      "epoch": 3.4,
      "learning_rate": 9.384390938660572e-06,
      "loss": 0.6006,
      "step": 65
    },
    {
      "epoch": 3.45,
      "learning_rate": 9.07731640536698e-06,
      "loss": 0.3747,
      "step": 66
    },
    {
      "epoch": 3.5,
      "learning_rate": 8.771117093352861e-06,
      "loss": 0.5541,
      "step": 67
    },
    {
      "epoch": 3.55,
      "learning_rate": 8.466083451213145e-06,
      "loss": 0.3197,
      "step": 68
    },
    {
      "epoch": 3.6,
      "learning_rate": 8.162504821834296e-06,
      "loss": 0.7504,
      "step": 69
    },
    {
      "epoch": 3.66,
      "learning_rate": 7.860669167935028e-06,
      "loss": 1.0028,
      "step": 70
    },
    {
      "epoch": 3.71,
      "learning_rate": 7.560862798916229e-06,
      "loss": 0.372,
      "step": 71
    },
    {
      "epoch": 3.76,
      "learning_rate": 7.263370099279173e-06,
      "loss": 0.9078,
      "step": 72
    },
    {
      "epoch": 3.81,
      "learning_rate": 6.968473258869566e-06,
      "loss": 0.4496,
      "step": 73
    },
    {
      "epoch": 3.87,
      "learning_rate": 6.6764520052034054e-06,
      "loss": 0.4063,
      "step": 74
    },
    {
      "epoch": 3.92,
      "learning_rate": 6.387583338128471e-06,
      "loss": 0.447,
      "step": 75
    },
    {
      "epoch": 3.97,
      "learning_rate": 6.102141267073207e-06,
      "loss": 1.5639,
      "step": 76
    },
    {
      "epoch": 4.02,
      "learning_rate": 5.82039655113217e-06,
      "loss": 1.4854,
      "step": 77
    },
    {
      "epoch": 4.08,
      "learning_rate": 5.542616442234618e-06,
      "loss": 0.3455,
      "step": 78
    },
    {
      "epoch": 4.13,
      "learning_rate": 5.269064431639901e-06,
      "loss": 0.3386,
      "step": 79
    },
    {
      "epoch": 4.18,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.4584,
      "step": 80
    },
    {
      "epoch": 4.23,
      "learning_rate": 4.7356783712264405e-06,
      "loss": 1.0233,
      "step": 81
    },
    {
      "epoch": 4.28,
      "learning_rate": 4.476350270394942e-06,
      "loss": 0.5479,
      "step": 82
    },
    {
      "epoch": 4.34,
      "learning_rate": 4.222261685917489e-06,
      "loss": 0.4285,
      "step": 83
    },
    {
      "epoch": 4.39,
      "learning_rate": 3.973653636207437e-06,
      "loss": 0.3907,
      "step": 84
    },
    {
      "epoch": 4.44,
      "learning_rate": 3.730761941058938e-06,
      "loss": 0.2726,
      "step": 85
    },
    {
      "epoch": 4.49,
      "learning_rate": 3.493816997957582e-06,
      "loss": 0.4647,
      "step": 86
    },
    {
      "epoch": 4.55,
      "learning_rate": 3.2630435635344283e-06,
      "loss": 0.2346,
      "step": 87
    },
    {
      "epoch": 4.6,
      "learning_rate": 3.0386605403707347e-06,
      "loss": 0.4546,
      "step": 88
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.820880769355582e-06,
      "loss": 0.7471,
      "step": 89
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.6099108277934105e-06,
      "loss": 0.2269,
      "step": 90
    },
    {
      "epoch": 4.75,
      "learning_rate": 2.405950833452928e-06,
      "loss": 0.7229,
      "step": 91
    },
    {
      "epoch": 4.81,
      "learning_rate": 2.209194254743295e-06,
      "loss": 0.42,
      "step": 92
    },
    {
      "epoch": 4.86,
      "learning_rate": 2.019827727197605e-06,
      "loss": 0.2855,
      "step": 93
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.8380308764377841e-06,
      "loss": 0.3325,
      "step": 94
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.663976147788806e-06,
      "loss": 1.4909,
      "step": 95
    },
    {
      "epoch": 5.02,
      "learning_rate": 1.4978286427038602e-06,
      "loss": 0.5706,
      "step": 96
    },
    {
      "epoch": 5.07,
      "learning_rate": 1.339745962155613e-06,
      "loss": 0.3255,
      "step": 97
    },
    {
      "epoch": 5.12,
      "learning_rate": 1.1898780571421554e-06,
      "loss": 0.2575,
      "step": 98
    },
    {
      "epoch": 5.17,
      "learning_rate": 1.0483670864493777e-06,
      "loss": 0.2684,
      "step": 99
    },
    {
      "epoch": 5.22,
      "learning_rate": 9.153472818047627e-07,
      "loss": 0.4387,
      "step": 100
    },
    {
      "epoch": 5.28,
      "learning_rate": 7.909448205504633e-07,
      "loss": 0.4106,
      "step": 101
    },
    {
      "epoch": 5.33,
      "learning_rate": 6.752777059564431e-07,
      "loss": 0.3488,
      "step": 102
    },
    {
      "epoch": 5.38,
      "learning_rate": 5.684556552872256e-07,
      "loss": 0.2281,
      "step": 103
    },
    {
      "epoch": 5.43,
      "learning_rate": 4.7057999572843516e-07,
      "loss": 0.1792,
      "step": 104
    },
    {
      "epoch": 5.49,
      "learning_rate": 3.817435682718096e-07,
      "loss": 0.3339,
      "step": 105
    },
    {
      "epoch": 5.54,
      "learning_rate": 3.020306396499062e-07,
      "loss": 0.1911,
      "step": 106
    },
    {
      "epoch": 5.59,
      "learning_rate": 2.315168224039932e-07,
      "loss": 0.3763,
      "step": 107
    },
    {
      "epoch": 5.64,
      "learning_rate": 1.7026900316098217e-07,
      "loss": 0.7196,
      "step": 108
    },
    {
      "epoch": 5.69,
      "learning_rate": 1.1834527918740624e-07,
      "loss": 0.193,
      "step": 109
    },
    {
      "epoch": 5.75,
      "learning_rate": 7.579490328064265e-08,
      "loss": 0.3355,
      "step": 110
    },
    {
      "epoch": 5.8,
      "learning_rate": 4.2658237049655325e-08,
      "loss": 0.4939,
      "step": 111
    },
    {
      "epoch": 5.85,
      "learning_rate": 1.896671262955896e-08,
      "loss": 0.2297,
      "step": 112
    },
    {
      "epoch": 5.9,
      "learning_rate": 4.74280286634099e-09,
      "loss": 0.2416,
      "step": 113
    },
    {
      "epoch": 5.96,
      "learning_rate": 0.0,
      "loss": 1.1284,
      "step": 114
    },
    {
      "epoch": 5.96,
      "step": 114,
      "total_flos": 5.099950184243134e+18,
      "train_loss": 0.9928589712893754,
      "train_runtime": 10459.5436,
      "train_samples_per_second": 5.622,
      "train_steps_per_second": 0.011
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 114,
  "num_train_epochs": 6,
  "save_steps": 100,
  "total_flos": 5.099950184243134e+18,
  "trial_name": null,
  "trial_params": null
}
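The JSON above appears to be a trainer_state.json as written by the Hugging Face transformers Trainer. Below is a minimal sketch (not part of the original file) of one way to inspect it, assuming it is saved locally as "trainer_state.json"; that filename and the use of matplotlib are assumptions, not anything recorded in the log.

import json

import matplotlib.pyplot as plt

# Load the saved trainer state; "trainer_state.json" is an assumed path.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry both "loss" and "learning_rate"; the final entry
# in log_history is an aggregate summary without them, so filter it out.
records = [r for r in state["log_history"]
           if "loss" in r and "learning_rate" in r]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

# Plot the training loss over the 114 logged optimizer steps.
plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.show()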