{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9971509971509972,
  "global_step": 350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 2e-05,
      "loss": 0.097,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 4e-05,
      "loss": 0.1096,
      "step": 4
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.999672139632675e-05,
      "loss": 0.098,
      "step": 6
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.9986886660231184e-05,
      "loss": 0.0934,
      "step": 8
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.997049901613351e-05,
      "loss": 0.1051,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.9947563836892725e-05,
      "loss": 0.1093,
      "step": 12
    },
    {
      "epoch": 0.04,
      "learning_rate": 3.9918088642045126e-05,
      "loss": 0.1113,
      "step": 14
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.9882083095338934e-05,
      "loss": 0.1097,
      "step": 16
    },
    {
      "epoch": 0.05,
      "learning_rate": 3.98395590015659e-05,
      "loss": 0.0988,
      "step": 18
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.979053030269103e-05,
      "loss": 0.1046,
      "step": 20
    },
    {
      "epoch": 0.06,
      "learning_rate": 3.9735013073281564e-05,
      "loss": 0.102,
      "step": 22
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.967302551523671e-05,
      "loss": 0.1029,
      "step": 24
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.960458795182003e-05,
      "loss": 0.1102,
      "step": 26
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.95297228209962e-05,
      "loss": 0.1021,
      "step": 28
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.944845466807451e-05,
      "loss": 0.1015,
      "step": 30
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.936081013766143e-05,
      "loss": 0.109,
      "step": 32
    },
    {
      "epoch": 0.1,
      "learning_rate": 3.9266817964924905e-05,
      "loss": 0.1131,
      "step": 34
    },
    {
      "epoch": 0.1,
      "learning_rate": 3.91665089661732e-05,
      "loss": 0.1019,
      "step": 36
    },
    {
      "epoch": 0.11,
      "learning_rate": 3.9059916028751496e-05,
      "loss": 0.1032,
      "step": 38
    },
    {
      "epoch": 0.11,
      "learning_rate": 3.894707410025941e-05,
      "loss": 0.104,
      "step": 40
    },
    {
      "epoch": 0.12,
      "learning_rate": 3.882802017709307e-05,
      "loss": 0.1015,
      "step": 42
    },
    {
      "epoch": 0.13,
      "learning_rate": 3.870279329231546e-05,
      "loss": 0.106,
      "step": 44
    },
    {
      "epoch": 0.13,
      "learning_rate": 3.857143450285901e-05,
      "loss": 0.0968,
      "step": 46
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.84339868760647e-05,
      "loss": 0.0982,
      "step": 48
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.829049547556193e-05,
      "loss": 0.1069,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.8141007346493964e-05,
      "loss": 0.1048,
      "step": 52
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.798557150009373e-05,
      "loss": 0.1054,
      "step": 54
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.782423889761492e-05,
      "loss": 0.1146,
      "step": 56
    },
    {
      "epoch": 0.17,
      "learning_rate": 3.7657062433623825e-05,
      "loss": 0.1015,
      "step": 58
    },
    {
      "epoch": 0.17,
      "learning_rate": 3.748409691865737e-05,
      "loss": 0.0976,
      "step": 60
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.7305399061252795e-05,
      "loss": 0.1108,
      "step": 62
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.712102744935529e-05,
      "loss": 0.1041,
      "step": 64
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.6931042531109246e-05,
      "loss": 0.1061,
      "step": 66
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.673550659503975e-05,
      "loss": 0.0952,
      "step": 68
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.6534483749630624e-05,
      "loss": 0.1023,
      "step": 70
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.6328039902305806e-05,
      "loss": 0.0961,
      "step": 72
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.611624273782092e-05,
      "loss": 0.0971,
      "step": 74
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.589916169607209e-05,
      "loss": 0.1019,
      "step": 76
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.567686794932943e-05,
      "loss": 0.1016,
      "step": 78
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.544943437890238e-05,
      "loss": 0.1057,
      "step": 80
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.5216935551244896e-05,
      "loss": 0.104,
      "step": 82
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.4979447693508e-05,
      "loss": 0.103,
      "step": 84
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.4737048668547995e-05,
      "loss": 0.1039,
      "step": 86
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.4489817949398224e-05,
      "loss": 0.0955,
      "step": 88
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.423783659321307e-05,
      "loss": 0.1059,
      "step": 90
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.398118721469255e-05,
      "loss": 0.1028,
      "step": 92
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.371995395899618e-05,
      "loss": 0.1056,
      "step": 94
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.345422247415512e-05,
      "loss": 0.0985,
      "step": 96
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.3184079882991606e-05,
      "loss": 0.106,
      "step": 98
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.29096147545548e-05,
      "loss": 0.0983,
      "step": 100
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.2630917075082545e-05,
      "loss": 0.0979,
      "step": 102
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.234807821849838e-05,
      "loss": 0.0987,
      "step": 104
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.2061190916453745e-05,
      "loss": 0.1096,
      "step": 106
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.1770349227924854e-05,
      "loss": 0.11,
      "step": 108
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.147564850837455e-05,
      "loss": 0.1004,
      "step": 110
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.1177185378488984e-05,
      "loss": 0.0939,
      "step": 112
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.0875057692499566e-05,
      "loss": 0.0944,
      "step": 114
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.05693645061004e-05,
      "loss": 0.1117,
      "step": 116
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.0260206043971857e-05,
      "loss": 0.0962,
      "step": 118
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.9947683666920913e-05,
      "loss": 0.0993,
      "step": 120
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.9631899838648887e-05,
      "loss": 0.0946,
      "step": 122
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.9312958092157724e-05,
      "loss": 0.1003,
      "step": 124
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.8990962995805577e-05,
      "loss": 0.1009,
      "step": 126
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.866602011902301e-05,
      "loss": 0.0913,
      "step": 128
    },
    {
      "epoch": 0.37,
      "learning_rate": 2.833823599770098e-05,
      "loss": 0.0961,
      "step": 130
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.8007718099261886e-05,
      "loss": 0.1042,
      "step": 132
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.767457478742533e-05,
      "loss": 0.1049,
      "step": 134
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.733891528667991e-05,
      "loss": 0.1063,
      "step": 136
    },
    {
      "epoch": 0.39,
      "learning_rate": 2.7000849646472826e-05,
      "loss": 0.1028,
      "step": 138
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.6660488705129054e-05,
      "loss": 0.0973,
      "step": 140
    },
    {
      "epoch": 0.4,
      "learning_rate": 2.6317944053511853e-05,
      "loss": 0.1022,
      "step": 142
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.5973327998436527e-05,
      "loss": 0.1044,
      "step": 144
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.562675352584947e-05,
      "loss": 0.104,
      "step": 146
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.5278334263784587e-05,
      "loss": 0.1015,
      "step": 148
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.4928184445109108e-05,
      "loss": 0.1026,
      "step": 150
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.457641887007121e-05,
      "loss": 0.1108,
      "step": 152
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.4223152868661535e-05,
      "loss": 0.104,
      "step": 154
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.3868502262801065e-05,
      "loss": 0.1013,
      "step": 156
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.3512583328367717e-05,
      "loss": 0.1004,
      "step": 158
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.3155512757074065e-05,
      "loss": 0.0986,
      "step": 160
    },
    {
      "epoch": 0.46,
      "learning_rate": 2.2797407618208784e-05,
      "loss": 0.0986,
      "step": 162
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.2438385320254234e-05,
      "loss": 0.1106,
      "step": 164
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.2078563572392907e-05,
      "loss": 0.097,
      "step": 166
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.171806034591522e-05,
      "loss": 0.0981,
      "step": 168
    },
    {
      "epoch": 0.48,
      "learning_rate": 2.135699383554144e-05,
      "loss": 0.1088,
      "step": 170
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.099548242067028e-05,
      "loss": 0.0911,
      "step": 172
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.0633644626567007e-05,
      "loss": 0.0978,
      "step": 174
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.0271599085503722e-05,
      "loss": 0.0912,
      "step": 176
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9909464497864487e-05,
      "loss": 0.107,
      "step": 178
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.954735959322825e-05,
      "loss": 0.1027,
      "step": 180
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.9185403091442044e-05,
      "loss": 0.1048,
      "step": 182
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.882371366369749e-05,
      "loss": 0.0915,
      "step": 184
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.846240989362325e-05,
      "loss": 0.1041,
      "step": 186
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.810161023840607e-05,
      "loss": 0.1001,
      "step": 188
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.774143298995346e-05,
      "loss": 0.1043,
      "step": 190
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7381996236110386e-05,
      "loss": 0.1067,
      "step": 192
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.702341782194301e-05,
      "loss": 0.1095,
      "step": 194
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.6665815311101896e-05,
      "loss": 0.1016,
      "step": 196
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.630930594727762e-05,
      "loss": 0.0963,
      "step": 198
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.5954006615761158e-05,
      "loss": 0.1036,
      "step": 200
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.560003380512185e-05,
      "loss": 0.1136,
      "step": 202
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.5247503569015413e-05,
      "loss": 0.0947,
      "step": 204
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.489653148813455e-05,
      "loss": 0.1105,
      "step": 206
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.4547232632314624e-05,
      "loss": 0.1033,
      "step": 208
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.4199721522806807e-05,
      "loss": 0.102,
      "step": 210
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.3854112094731116e-05,
      "loss": 0.1037,
      "step": 212
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.3510517659721583e-05,
      "loss": 0.1005,
      "step": 214
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.316905086877589e-05,
      "loss": 0.0979,
      "step": 216
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.2829823675321535e-05,
      "loss": 0.1007,
      "step": 218
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.2492947298510783e-05,
      "loss": 0.1002,
      "step": 220
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.2158532186756275e-05,
      "loss": 0.1037,
      "step": 222
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.182668798151939e-05,
      "loss": 0.1018,
      "step": 224
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.1497523481363146e-05,
      "loss": 0.1002,
      "step": 226
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.1171146606281482e-05,
      "loss": 0.0982,
      "step": 228
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.0847664362316549e-05,
      "loss": 0.102,
      "step": 230
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.0527182806475662e-05,
      "loss": 0.0928,
      "step": 232
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.020980701195946e-05,
      "loss": 0.0996,
      "step": 234
    },
    {
      "epoch": 0.67,
      "learning_rate": 9.895641033712507e-06,
      "loss": 0.1014,
      "step": 236
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.584787874307828e-06,
      "loss": 0.0994,
      "step": 238
    },
    {
      "epoch": 0.68,
      "learning_rate": 9.277349450176445e-06,
      "loss": 0.1092,
      "step": 240
    },
    {
      "epoch": 0.69,
      "learning_rate": 8.97342655819303e-06,
      "loss": 0.0992,
      "step": 242
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.673118842628595e-06,
      "loss": 0.0892,
      "step": 244
    },
    {
      "epoch": 0.7,
      "learning_rate": 8.376524762481069e-06,
      "loss": 0.0975,
      "step": 246
    },
    {
      "epoch": 0.71,
      "learning_rate": 8.083741559194515e-06,
      "loss": 0.0982,
      "step": 248
    },
    {
      "epoch": 0.71,
      "learning_rate": 7.794865224777504e-06,
      "loss": 0.1026,
      "step": 250
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.509990470331159e-06,
      "loss": 0.0973,
      "step": 252
    },
    {
      "epoch": 0.72,
      "learning_rate": 7.229210694997113e-06,
      "loss": 0.0985,
      "step": 254
    },
    {
      "epoch": 0.73,
      "learning_rate": 6.952617955335641e-06,
      "loss": 0.1005,
      "step": 256
    },
    {
      "epoch": 0.74,
      "learning_rate": 6.680302935143963e-06,
      "loss": 0.0968,
      "step": 258
    },
    {
      "epoch": 0.74,
      "learning_rate": 6.412354915724642e-06,
      "loss": 0.1079,
      "step": 260
    },
    {
      "epoch": 0.75,
      "learning_rate": 6.14886174661373e-06,
      "loss": 0.0994,
      "step": 262
    },
    {
      "epoch": 0.75,
      "learning_rate": 5.889909816778458e-06,
      "loss": 0.0991,
      "step": 264
    },
    {
      "epoch": 0.76,
      "learning_rate": 5.635584026293655e-06,
      "loss": 0.098,
      "step": 266
    },
    {
      "epoch": 0.76,
      "learning_rate": 5.385967758506407e-06,
      "loss": 0.1035,
      "step": 268
    },
    {
      "epoch": 0.77,
      "learning_rate": 5.141142852697956e-06,
      "loss": 0.1022,
      "step": 270
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.901189577251864e-06,
      "loss": 0.0938,
      "step": 272
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.6661866033371506e-06,
      "loss": 0.1026,
      "step": 274
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.4362109791151695e-06,
      "loss": 0.0983,
      "step": 276
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.211338104478548e-06,
      "loss": 0.1036,
      "step": 278
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.991641706330575e-06,
      "loss": 0.092,
      "step": 280
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.777193814413045e-06,
      "loss": 0.1038,
      "step": 282
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.5680647376905666e-06,
      "loss": 0.1,
      "step": 284
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.3643230412990625e-06,
      "loss": 0.1053,
      "step": 286
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.1660355240659423e-06,
      "loss": 0.1029,
      "step": 288
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.973267196609453e-06,
      "loss": 0.0974,
      "step": 290
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.786081260024236e-06,
      "loss": 0.0966,
      "step": 292
    },
    {
      "epoch": 0.84,
      "learning_rate": 2.604539085160218e-06,
      "loss": 0.1029,
      "step": 294
    },
    {
      "epoch": 0.84,
      "learning_rate": 2.428700192501534e-06,
      "loss": 0.1031,
      "step": 296
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.2586222326521277e-06,
      "loss": 0.1017,
      "step": 298
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.0943609674343833e-06,
      "loss": 0.1021,
      "step": 300
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.9359702516070553e-06,
      "loss": 0.103,
      "step": 302
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.7835020152084116e-06,
      "loss": 0.1023,
      "step": 304
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6370062465304503e-06,
      "loss": 0.1034,
      "step": 306
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.496530975729693e-06,
      "loss": 0.0926,
      "step": 308
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.3621222590800342e-06,
      "loss": 0.099,
      "step": 310
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.2338241638726811e-06,
      "loss": 0.0985,
      "step": 312
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.1116787539682571e-06,
      "loss": 0.1044,
      "step": 314
    },
    {
      "epoch": 0.9,
      "learning_rate": 9.957260760057164e-07,
      "loss": 0.0929,
      "step": 316
    },
    {
      "epoch": 0.91,
      "learning_rate": 8.860041462726543e-07,
      "loss": 0.0988,
      "step": 318
    },
    {
      "epoch": 0.91,
      "learning_rate": 7.825489382412521e-07,
      "loss": 0.1043,
      "step": 320
    },
    {
      "epoch": 0.92,
      "learning_rate": 6.853943707740218e-07,
      "loss": 0.0975,
      "step": 322
    },
    {
      "epoch": 0.92,
      "learning_rate": 5.945722970031332e-07,
      "loss": 0.1065,
      "step": 324
    },
    {
      "epoch": 0.93,
      "learning_rate": 5.101124938870605e-07,
      "loss": 0.0978,
      "step": 326
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.320426524478749e-07,
      "loss": 0.1,
      "step": 328
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.603883686924681e-07,
      "loss": 0.0967,
      "step": 330
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.951731352206322e-07,
      "loss": 0.0954,
      "step": 332
    },
    {
      "epoch": 0.95,
      "learning_rate": 2.3641833352276768e-07,
      "loss": 0.1016,
      "step": 334
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.841432269697463e-07,
      "loss": 0.0993,
      "step": 336
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.3836495449719878e-07,
      "loss": 0.1002,
      "step": 338
    },
    {
      "epoch": 0.97,
      "learning_rate": 9.90985249863563e-08,
      "loss": 0.1043,
      "step": 340
    },
    {
      "epoch": 0.97,
      "learning_rate": 6.635681234321789e-08,
      "loss": 0.0984,
      "step": 342
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.0150551277724494e-08,
      "loss": 0.1046,
      "step": 344
    },
    {
      "epoch": 0.99,
      "learning_rate": 2.0488333784249858e-08,
      "loss": 0.0948,
      "step": 346
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.376606324644986e-09,
      "loss": 0.0897,
      "step": 348
    },
    {
      "epoch": 1.0,
      "learning_rate": 8.196677146932175e-10,
      "loss": 0.0971,
      "step": 350
    }
  ],
  "max_steps": 351,
  "num_train_epochs": 1,
  "total_flos": 2.454170351173632e+17,
  "trial_name": null,
  "trial_params": null
}