{
  "best_metric": 26.5388,
  "best_model_checkpoint": "output/checkpoint-200000",
  "epoch": 14.498006524102935,
  "eval_steps": 50000,
  "global_step": 200000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 1.9951673311586325e-05,
      "loss": 12.9031,
      "step": 500
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9903346623172648e-05,
      "loss": 4.995,
      "step": 1000
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.985501993475897e-05,
      "loss": 4.013,
      "step": 1500
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9806693246345298e-05,
      "loss": 3.7433,
      "step": 2000
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9758366557931618e-05,
      "loss": 3.5754,
      "step": 2500
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9710039869517944e-05,
      "loss": 3.4371,
      "step": 3000
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9661713181104268e-05,
      "loss": 3.3418,
      "step": 3500
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.961338649269059e-05,
      "loss": 3.2642,
      "step": 4000
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9565059804276914e-05,
      "loss": 3.2133,
      "step": 4500
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9516733115863237e-05,
      "loss": 3.1533,
      "step": 5000
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.946840642744956e-05,
      "loss": 3.1304,
      "step": 5500
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9420079739035884e-05,
      "loss": 3.0707,
      "step": 6000
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9371753050622207e-05,
      "loss": 3.0091,
      "step": 6500
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.932342636220853e-05,
      "loss": 2.9992,
      "step": 7000
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.9275099673794853e-05,
      "loss": 2.9653,
      "step": 7500
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.922677298538118e-05,
      "loss": 2.9463,
      "step": 8000
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.91784462969675e-05,
      "loss": 2.9304,
      "step": 8500
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.9130119608553827e-05,
      "loss": 2.8825,
      "step": 9000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.908179292014015e-05,
      "loss": 2.863,
      "step": 9500
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.9033466231726473e-05,
      "loss": 2.8367,
      "step": 10000
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.8985139543312796e-05,
      "loss": 2.8211,
      "step": 10500
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.893681285489912e-05,
      "loss": 2.8159,
      "step": 11000
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.8888486166485443e-05,
      "loss": 2.7894,
      "step": 11500
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.8840159478071766e-05,
      "loss": 2.7596,
      "step": 12000
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.8791832789658093e-05,
      "loss": 2.7576,
      "step": 12500
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.8743506101244412e-05,
      "loss": 2.7496,
      "step": 13000
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.869517941283074e-05,
      "loss": 2.728,
      "step": 13500
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.8646852724417062e-05,
      "loss": 2.6927,
      "step": 14000
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.8598526036003382e-05,
      "loss": 2.7037,
      "step": 14500
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.855019934758971e-05,
      "loss": 2.6919,
      "step": 15000
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.8501872659176032e-05,
      "loss": 2.66,
      "step": 15500
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.8453545970762355e-05,
      "loss": 2.6546,
      "step": 16000
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.840521928234868e-05,
      "loss": 2.6415,
      "step": 16500
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.8356892593935e-05,
      "loss": 2.6423,
      "step": 17000
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.8308565905521325e-05,
      "loss": 2.625,
      "step": 17500
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.8260239217107648e-05,
      "loss": 2.6041,
      "step": 18000
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.8211912528693975e-05,
      "loss": 2.5971,
      "step": 18500
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.8163585840280295e-05,
      "loss": 2.5993,
      "step": 19000
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.811525915186662e-05,
      "loss": 2.5707,
      "step": 19500
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.8066932463452944e-05,
      "loss": 2.5933,
      "step": 20000
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.8018605775039268e-05,
      "loss": 2.5669,
      "step": 20500
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.797027908662559e-05,
      "loss": 2.5681,
      "step": 21000
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.7921952398211914e-05,
      "loss": 2.5648,
      "step": 21500
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.7873625709798237e-05,
      "loss": 2.5403,
      "step": 22000
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.782529902138456e-05,
      "loss": 2.5314,
      "step": 22500
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.7776972332970884e-05,
      "loss": 2.5094,
      "step": 23000
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.7728645644557207e-05,
      "loss": 2.537,
      "step": 23500
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.768031895614353e-05,
      "loss": 2.5389,
      "step": 24000
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.7631992267729857e-05,
      "loss": 2.5243,
      "step": 24500
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.7583665579316177e-05,
      "loss": 2.5064,
      "step": 25000
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.7535338890902503e-05,
      "loss": 2.5106,
      "step": 25500
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.7487012202488827e-05,
      "loss": 2.4955,
      "step": 26000
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.743868551407515e-05,
      "loss": 2.488,
      "step": 26500
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.7390358825661473e-05,
      "loss": 2.4869,
      "step": 27000
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.7342032137247796e-05,
      "loss": 2.4838,
      "step": 27500
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.729370544883412e-05,
      "loss": 2.4756,
      "step": 28000
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.7245378760420443e-05,
      "loss": 2.4512,
      "step": 28500
    },
    {
      "epoch": 2.1,
      "learning_rate": 1.719705207200677e-05,
      "loss": 2.4456,
      "step": 29000
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.714872538359309e-05,
      "loss": 2.4485,
      "step": 29500
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.7100398695179416e-05,
      "loss": 2.4304,
      "step": 30000
    },
    {
      "epoch": 2.21,
      "learning_rate": 1.705207200676574e-05,
      "loss": 2.4417,
      "step": 30500
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.7003745318352062e-05,
      "loss": 2.43,
      "step": 31000
    },
    {
      "epoch": 2.28,
      "learning_rate": 1.6955418629938385e-05,
      "loss": 2.443,
      "step": 31500
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.690709194152471e-05,
      "loss": 2.4195,
      "step": 32000
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.6858765253111032e-05,
      "loss": 2.4103,
      "step": 32500
    },
    {
      "epoch": 2.39,
      "learning_rate": 1.6810438564697355e-05,
      "loss": 2.4133,
      "step": 33000
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.676211187628368e-05,
      "loss": 2.4059,
      "step": 33500
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.671378518787e-05,
      "loss": 2.4059,
      "step": 34000
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.6665458499456325e-05,
      "loss": 2.4109,
      "step": 34500
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.661713181104265e-05,
      "loss": 2.4025,
      "step": 35000
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.656880512262897e-05,
      "loss": 2.397,
      "step": 35500
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.6520478434215298e-05,
      "loss": 2.3958,
      "step": 36000
    },
    {
      "epoch": 2.65,
      "learning_rate": 1.647215174580162e-05,
      "loss": 2.3681,
      "step": 36500
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.6423825057387944e-05,
      "loss": 2.4033,
      "step": 37000
    },
    {
      "epoch": 2.72,
      "learning_rate": 1.6375498368974268e-05,
      "loss": 2.3801,
      "step": 37500
    },
    {
      "epoch": 2.75,
      "learning_rate": 1.632717168056059e-05,
      "loss": 2.3898,
      "step": 38000
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.6278844992146914e-05,
      "loss": 2.3613,
      "step": 38500
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.6230518303733237e-05,
      "loss": 2.3879,
      "step": 39000
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.6182191615319564e-05,
      "loss": 2.362,
      "step": 39500
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.6133864926905884e-05,
      "loss": 2.3625,
      "step": 40000
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.608553823849221e-05,
      "loss": 2.3635,
      "step": 40500
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.6037211550078534e-05,
      "loss": 2.3615,
      "step": 41000
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.5988884861664853e-05,
      "loss": 2.3327,
      "step": 41500
    },
    {
      "epoch": 3.04,
      "learning_rate": 1.594055817325118e-05,
      "loss": 2.3424,
      "step": 42000
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.5892231484837503e-05,
      "loss": 2.3462,
      "step": 42500
    },
    {
      "epoch": 3.12,
      "learning_rate": 1.5843904796423827e-05,
      "loss": 2.3395,
      "step": 43000
    },
    {
      "epoch": 3.15,
      "learning_rate": 1.579557810801015e-05,
      "loss": 2.3287,
      "step": 43500
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.5747251419596473e-05,
      "loss": 2.3408,
      "step": 44000
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.5698924731182796e-05,
      "loss": 2.3126,
      "step": 44500
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.565059804276912e-05,
      "loss": 2.3234,
      "step": 45000
    },
    {
      "epoch": 3.3,
      "learning_rate": 1.5602271354355446e-05,
      "loss": 2.324,
      "step": 45500
    },
    {
      "epoch": 3.33,
      "learning_rate": 1.5553944665941766e-05,
      "loss": 2.3156,
      "step": 46000
    },
    {
      "epoch": 3.37,
      "learning_rate": 1.5505617977528093e-05,
      "loss": 2.3264,
      "step": 46500
    },
    {
      "epoch": 3.41,
      "learning_rate": 1.5457291289114416e-05,
      "loss": 2.3086,
      "step": 47000
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.540896460070074e-05,
      "loss": 2.3095,
      "step": 47500
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.5360637912287062e-05,
      "loss": 2.3015,
      "step": 48000
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.5312311223873385e-05,
      "loss": 2.302,
      "step": 48500
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.526398453545971e-05,
      "loss": 2.3164,
      "step": 49000
    },
    {
      "epoch": 3.59,
      "learning_rate": 1.5215657847046032e-05,
      "loss": 2.3067,
      "step": 49500
    },
    {
      "epoch": 3.62,
      "learning_rate": 1.5167331158632357e-05,
      "loss": 2.2705,
      "step": 50000
    },
    {
      "epoch": 3.62,
      "eval_bleu": 23.2646,
      "eval_gen_len": 26.1331,
      "eval_loss": 1.9072902202606201,
      "eval_runtime": 5550.0392,
      "eval_samples_per_second": 9.942,
      "eval_steps_per_second": 1.243,
      "step": 50000
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.5119004470218678e-05,
      "loss": 2.2769,
      "step": 50500
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.5070677781805003e-05,
      "loss": 2.3089,
      "step": 51000
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.5022351093391328e-05,
      "loss": 2.2817,
      "step": 51500
    },
    {
      "epoch": 3.77,
      "learning_rate": 1.497402440497765e-05,
      "loss": 2.2757,
      "step": 52000
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.4925697716563973e-05,
      "loss": 2.287,
      "step": 52500
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.4877371028150298e-05,
      "loss": 2.289,
      "step": 53000
    },
    {
      "epoch": 3.88,
      "learning_rate": 1.482904433973662e-05,
      "loss": 2.268,
      "step": 53500
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.4780717651322944e-05,
      "loss": 2.2766,
      "step": 54000
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.473239096290927e-05,
      "loss": 2.2775,
      "step": 54500
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.4684064274495591e-05,
      "loss": 2.2795,
      "step": 55000
    },
    {
      "epoch": 4.02,
      "learning_rate": 1.4635737586081916e-05,
      "loss": 2.2683,
      "step": 55500
    },
    {
      "epoch": 4.06,
      "learning_rate": 1.4587410897668239e-05,
      "loss": 2.2551,
      "step": 56000
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.4539084209254562e-05,
      "loss": 2.2511,
      "step": 56500
    },
    {
      "epoch": 4.13,
      "learning_rate": 1.4490757520840885e-05,
      "loss": 2.258,
      "step": 57000
    },
    {
      "epoch": 4.17,
      "learning_rate": 1.444243083242721e-05,
      "loss": 2.2593,
      "step": 57500
    },
    {
      "epoch": 4.2,
      "learning_rate": 1.4394104144013532e-05,
      "loss": 2.2481,
      "step": 58000
    },
    {
      "epoch": 4.24,
      "learning_rate": 1.4345777455599857e-05,
      "loss": 2.2485,
      "step": 58500
    },
    {
      "epoch": 4.28,
      "learning_rate": 1.429745076718618e-05,
      "loss": 2.2594,
      "step": 59000
    },
    {
      "epoch": 4.31,
      "learning_rate": 1.4249124078772503e-05,
      "loss": 2.2593,
      "step": 59500
    },
    {
      "epoch": 4.35,
      "learning_rate": 1.4200797390358827e-05,
      "loss": 2.242,
      "step": 60000
    },
    {
      "epoch": 4.39,
      "learning_rate": 1.4152470701945151e-05,
      "loss": 2.2294,
      "step": 60500
    },
    {
      "epoch": 4.42,
      "learning_rate": 1.4104144013531473e-05,
      "loss": 2.2517,
      "step": 61000
    },
    {
      "epoch": 4.46,
      "learning_rate": 1.4055817325117798e-05,
      "loss": 2.2243,
      "step": 61500
    },
    {
      "epoch": 4.49,
      "learning_rate": 1.4007490636704121e-05,
      "loss": 2.2484,
      "step": 62000
    },
    {
      "epoch": 4.53,
      "learning_rate": 1.3959163948290444e-05,
      "loss": 2.228,
      "step": 62500
    },
    {
      "epoch": 4.57,
      "learning_rate": 1.3910837259876768e-05,
      "loss": 2.2314,
      "step": 63000
    },
    {
      "epoch": 4.6,
      "learning_rate": 1.3862510571463093e-05,
      "loss": 2.2317,
      "step": 63500
    },
    {
      "epoch": 4.64,
      "learning_rate": 1.3814183883049414e-05,
      "loss": 2.2187,
      "step": 64000
    },
    {
      "epoch": 4.68,
      "learning_rate": 1.3765857194635739e-05,
      "loss": 2.2325,
      "step": 64500
    },
    {
      "epoch": 4.71,
      "learning_rate": 1.3717530506222064e-05,
      "loss": 2.2221,
      "step": 65000
    },
    {
      "epoch": 4.75,
      "learning_rate": 1.3669203817808385e-05,
      "loss": 2.2218,
      "step": 65500
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.3620877129394709e-05,
      "loss": 2.2156,
      "step": 66000
    },
    {
      "epoch": 4.82,
      "learning_rate": 1.3572550440981034e-05,
      "loss": 2.2232,
      "step": 66500
    },
    {
      "epoch": 4.86,
      "learning_rate": 1.3524223752567355e-05,
      "loss": 2.2084,
      "step": 67000
    },
    {
      "epoch": 4.89,
      "learning_rate": 1.347589706415368e-05,
      "loss": 2.2374,
      "step": 67500
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.3427570375740005e-05,
      "loss": 2.2046,
      "step": 68000
    },
    {
      "epoch": 4.97,
      "learning_rate": 1.3379243687326327e-05,
      "loss": 2.2109,
      "step": 68500
    },
    {
      "epoch": 5.0,
      "learning_rate": 1.3330916998912651e-05,
      "loss": 2.2115,
      "step": 69000
    },
    {
      "epoch": 5.04,
      "learning_rate": 1.3282590310498975e-05,
      "loss": 2.2053,
      "step": 69500
    },
    {
      "epoch": 5.07,
      "learning_rate": 1.3234263622085298e-05,
      "loss": 2.1956,
      "step": 70000
    },
    {
      "epoch": 5.11,
      "learning_rate": 1.3185936933671621e-05,
      "loss": 2.2031,
      "step": 70500
    },
    {
      "epoch": 5.15,
      "learning_rate": 1.3137610245257946e-05,
      "loss": 2.1959,
      "step": 71000
    },
    {
      "epoch": 5.18,
      "learning_rate": 1.3089283556844268e-05,
      "loss": 2.2023,
      "step": 71500
    },
    {
      "epoch": 5.22,
      "learning_rate": 1.3040956868430593e-05,
      "loss": 2.197,
      "step": 72000
    },
    {
      "epoch": 5.26,
      "learning_rate": 1.2992630180016916e-05,
      "loss": 2.212,
      "step": 72500
    },
    {
      "epoch": 5.29,
      "learning_rate": 1.2944303491603239e-05,
      "loss": 2.204,
      "step": 73000
    },
    {
      "epoch": 5.33,
      "learning_rate": 1.2895976803189562e-05,
      "loss": 2.1871,
      "step": 73500
    },
    {
      "epoch": 5.36,
      "learning_rate": 1.2847650114775887e-05,
      "loss": 2.1852,
      "step": 74000
    },
    {
      "epoch": 5.4,
      "learning_rate": 1.2799323426362209e-05,
      "loss": 2.1804,
      "step": 74500
    },
    {
      "epoch": 5.44,
      "learning_rate": 1.2750996737948534e-05,
      "loss": 2.1949,
      "step": 75000
    },
    {
      "epoch": 5.47,
      "learning_rate": 1.2702670049534857e-05,
      "loss": 2.1662,
      "step": 75500
    },
    {
      "epoch": 5.51,
      "learning_rate": 1.265434336112118e-05,
      "loss": 2.1871,
      "step": 76000
    },
    {
      "epoch": 5.55,
      "learning_rate": 1.2606016672707503e-05,
      "loss": 2.1841,
      "step": 76500
    },
    {
      "epoch": 5.58,
      "learning_rate": 1.2557689984293828e-05,
      "loss": 2.1719,
      "step": 77000
    },
    {
      "epoch": 5.62,
      "learning_rate": 1.250936329588015e-05,
      "loss": 2.1755,
      "step": 77500
    },
    {
      "epoch": 5.65,
      "learning_rate": 1.2461036607466475e-05,
      "loss": 2.1589,
      "step": 78000
    },
    {
      "epoch": 5.69,
      "learning_rate": 1.24127099190528e-05,
      "loss": 2.1785,
      "step": 78500
    },
    {
      "epoch": 5.73,
      "learning_rate": 1.2364383230639121e-05,
      "loss": 2.1931,
      "step": 79000
    },
    {
      "epoch": 5.76,
      "learning_rate": 1.2316056542225444e-05,
      "loss": 2.1827,
      "step": 79500
    },
    {
      "epoch": 5.8,
      "learning_rate": 1.226772985381177e-05,
      "loss": 2.1628,
      "step": 80000
    },
    {
      "epoch": 5.84,
      "learning_rate": 1.2219403165398091e-05,
      "loss": 2.1702,
      "step": 80500
    },
    {
      "epoch": 5.87,
      "learning_rate": 1.2171076476984416e-05,
      "loss": 2.161,
      "step": 81000
    },
    {
      "epoch": 5.91,
      "learning_rate": 1.212274978857074e-05,
      "loss": 2.172,
      "step": 81500
    },
    {
      "epoch": 5.94,
      "learning_rate": 1.2074423100157062e-05,
      "loss": 2.1731,
      "step": 82000
    },
    {
      "epoch": 5.98,
      "learning_rate": 1.2026096411743387e-05,
      "loss": 2.1582,
      "step": 82500
    },
    {
      "epoch": 6.02,
      "learning_rate": 1.197776972332971e-05,
      "loss": 2.1739,
      "step": 83000
    },
    {
      "epoch": 6.05,
      "learning_rate": 1.1929443034916034e-05,
      "loss": 2.1447,
      "step": 83500
    },
    {
      "epoch": 6.09,
      "learning_rate": 1.1881116346502357e-05,
      "loss": 2.159,
      "step": 84000
    },
    {
      "epoch": 6.13,
      "learning_rate": 1.1832789658088682e-05,
      "loss": 2.17,
      "step": 84500
    },
    {
      "epoch": 6.16,
      "learning_rate": 1.1784462969675003e-05,
      "loss": 2.1441,
      "step": 85000
    },
    {
      "epoch": 6.2,
      "learning_rate": 1.1736136281261328e-05,
      "loss": 2.1447,
      "step": 85500
    },
    {
      "epoch": 6.23,
      "learning_rate": 1.1687809592847651e-05,
      "loss": 2.1529,
      "step": 86000
    },
    {
      "epoch": 6.27,
      "learning_rate": 1.1639482904433975e-05,
      "loss": 2.1558,
      "step": 86500
    },
    {
      "epoch": 6.31,
      "learning_rate": 1.1591156216020298e-05,
      "loss": 2.1356,
      "step": 87000
    },
    {
      "epoch": 6.34,
      "learning_rate": 1.1542829527606623e-05,
      "loss": 2.1593,
      "step": 87500
    },
    {
      "epoch": 6.38,
      "learning_rate": 1.1494502839192944e-05,
      "loss": 2.1467,
      "step": 88000
    },
    {
      "epoch": 6.42,
      "learning_rate": 1.144617615077927e-05,
      "loss": 2.1533,
      "step": 88500
    },
    {
      "epoch": 6.45,
      "learning_rate": 1.1397849462365593e-05,
      "loss": 2.1261,
      "step": 89000
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.1349522773951916e-05,
      "loss": 2.1519,
      "step": 89500
    },
    {
      "epoch": 6.52,
      "learning_rate": 1.1301196085538239e-05,
      "loss": 2.1513,
      "step": 90000
    },
    {
      "epoch": 6.56,
      "learning_rate": 1.1252869397124564e-05,
      "loss": 2.1404,
      "step": 90500
    },
    {
      "epoch": 6.6,
      "learning_rate": 1.1204542708710885e-05,
      "loss": 2.1427,
      "step": 91000
    },
    {
      "epoch": 6.63,
      "learning_rate": 1.115621602029721e-05,
      "loss": 2.138,
      "step": 91500
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.1107889331883535e-05,
      "loss": 2.1382,
      "step": 92000
    },
    {
      "epoch": 6.71,
      "learning_rate": 1.1059562643469857e-05,
      "loss": 2.1344,
      "step": 92500
    },
    {
      "epoch": 6.74,
      "learning_rate": 1.101123595505618e-05,
      "loss": 2.1474,
      "step": 93000
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.0962909266642505e-05,
      "loss": 2.1197,
      "step": 93500
    },
    {
      "epoch": 6.81,
      "learning_rate": 1.0914582578228827e-05,
      "loss": 2.146,
      "step": 94000
    },
    {
      "epoch": 6.85,
      "learning_rate": 1.0866255889815151e-05,
      "loss": 2.1313,
      "step": 94500
    },
    {
      "epoch": 6.89,
      "learning_rate": 1.0817929201401476e-05,
      "loss": 2.1374,
      "step": 95000
    },
    {
      "epoch": 6.92,
      "learning_rate": 1.0769602512987798e-05,
      "loss": 2.1329,
      "step": 95500
    },
    {
      "epoch": 6.96,
      "learning_rate": 1.0721275824574123e-05,
      "loss": 2.132,
      "step": 96000
    },
    {
      "epoch": 7.0,
      "learning_rate": 1.0672949136160446e-05,
      "loss": 2.1142,
      "step": 96500
    },
    {
      "epoch": 7.03,
      "learning_rate": 1.062462244774677e-05,
      "loss": 2.1338,
      "step": 97000
    },
    {
      "epoch": 7.07,
      "learning_rate": 1.0576295759333093e-05,
      "loss": 2.1137,
      "step": 97500
    },
    {
      "epoch": 7.1,
      "learning_rate": 1.0527969070919417e-05,
      "loss": 2.1221,
      "step": 98000
    },
    {
      "epoch": 7.14,
      "learning_rate": 1.0479642382505739e-05,
      "loss": 2.1382,
      "step": 98500
    },
    {
      "epoch": 7.18,
      "learning_rate": 1.0431315694092064e-05,
      "loss": 2.1207,
      "step": 99000
    },
    {
      "epoch": 7.21,
      "learning_rate": 1.0382989005678387e-05,
      "loss": 2.1163,
      "step": 99500
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.033466231726471e-05,
      "loss": 2.1263,
      "step": 100000
    },
    {
      "epoch": 7.25,
      "eval_bleu": 25.2752,
      "eval_gen_len": 24.8098,
      "eval_loss": 1.8044791221618652,
      "eval_runtime": 4767.4438,
      "eval_samples_per_second": 11.574,
      "eval_steps_per_second": 1.447,
      "step": 100000
    },
    {
      "epoch": 7.29,
      "learning_rate": 1.0286335628851034e-05,
      "loss": 2.0977,
      "step": 100500
    },
    {
      "epoch": 7.32,
      "learning_rate": 1.0238008940437359e-05,
      "loss": 2.1114,
      "step": 101000
    },
    {
      "epoch": 7.36,
      "learning_rate": 1.018968225202368e-05,
      "loss": 2.1208,
      "step": 101500
    },
    {
      "epoch": 7.39,
      "learning_rate": 1.0141355563610005e-05,
      "loss": 2.1006,
      "step": 102000
    },
    {
      "epoch": 7.43,
      "learning_rate": 1.0093028875196328e-05,
      "loss": 2.1091,
      "step": 102500
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.0044702186782651e-05,
      "loss": 2.1347,
      "step": 103000
    },
    {
      "epoch": 7.5,
      "learning_rate": 9.996375498368975e-06,
      "loss": 2.1106,
      "step": 103500
    },
    {
      "epoch": 7.54,
      "learning_rate": 9.948048809955298e-06,
      "loss": 2.1071,
      "step": 104000
    },
    {
      "epoch": 7.58,
      "learning_rate": 9.899722121541621e-06,
      "loss": 2.0954,
      "step": 104500
    },
    {
      "epoch": 7.61,
      "learning_rate": 9.851395433127946e-06,
      "loss": 2.1072,
      "step": 105000
    },
    {
      "epoch": 7.65,
      "learning_rate": 9.80306874471427e-06,
      "loss": 2.1051,
      "step": 105500
    },
    {
      "epoch": 7.68,
      "learning_rate": 9.754742056300593e-06,
      "loss": 2.1066,
      "step": 106000
    },
    {
      "epoch": 7.72,
      "learning_rate": 9.706415367886916e-06,
      "loss": 2.1122,
      "step": 106500
    },
    {
      "epoch": 7.76,
      "learning_rate": 9.658088679473239e-06,
      "loss": 2.0952,
      "step": 107000
    },
    {
      "epoch": 7.79,
      "learning_rate": 9.609761991059562e-06,
      "loss": 2.085,
      "step": 107500
    },
    {
      "epoch": 7.83,
      "learning_rate": 9.561435302645887e-06,
      "loss": 2.1076,
      "step": 108000
    },
    {
      "epoch": 7.87,
      "learning_rate": 9.51310861423221e-06,
      "loss": 2.1236,
      "step": 108500
    },
    {
      "epoch": 7.9,
      "learning_rate": 9.464781925818534e-06,
      "loss": 2.0932,
      "step": 109000
    },
    {
      "epoch": 7.94,
      "learning_rate": 9.416455237404859e-06,
      "loss": 2.0892,
      "step": 109500
    },
    {
      "epoch": 7.97,
      "learning_rate": 9.368128548991182e-06,
      "loss": 2.0778,
      "step": 110000
    },
    {
      "epoch": 8.01,
      "learning_rate": 9.319801860577505e-06,
      "loss": 2.1035,
      "step": 110500
    },
    {
      "epoch": 8.05,
      "learning_rate": 9.271475172163828e-06,
      "loss": 2.0876,
      "step": 111000
    },
    {
      "epoch": 8.08,
      "learning_rate": 9.223148483750151e-06,
      "loss": 2.0869,
      "step": 111500
    },
    {
      "epoch": 8.12,
      "learning_rate": 9.174821795336475e-06,
      "loss": 2.0972,
      "step": 112000
    },
    {
      "epoch": 8.16,
      "learning_rate": 9.1264951069228e-06,
      "loss": 2.0948,
      "step": 112500
    },
    {
      "epoch": 8.19,
      "learning_rate": 9.078168418509123e-06,
      "loss": 2.0742,
      "step": 113000
    },
    {
      "epoch": 8.23,
      "learning_rate": 9.029841730095446e-06,
      "loss": 2.085,
      "step": 113500
    },
    {
      "epoch": 8.26,
      "learning_rate": 8.98151504168177e-06,
      "loss": 2.0885,
      "step": 114000
    },
    {
      "epoch": 8.3,
      "learning_rate": 8.933188353268093e-06,
      "loss": 2.0667,
      "step": 114500
    },
    {
      "epoch": 8.34,
      "learning_rate": 8.884861664854416e-06,
      "loss": 2.1004,
      "step": 115000
    },
    {
      "epoch": 8.37,
      "learning_rate": 8.83653497644074e-06,
      "loss": 2.099,
      "step": 115500
    },
    {
      "epoch": 8.41,
      "learning_rate": 8.788208288027064e-06,
      "loss": 2.0988,
      "step": 116000
    },
    {
      "epoch": 8.45,
      "learning_rate": 8.739881599613387e-06,
      "loss": 2.0861,
      "step": 116500
    },
    {
      "epoch": 8.48,
      "learning_rate": 8.69155491119971e-06,
      "loss": 2.0722,
      "step": 117000
    },
    {
      "epoch": 8.52,
      "learning_rate": 8.643228222786034e-06,
      "loss": 2.0951,
      "step": 117500
    },
    {
      "epoch": 8.55,
      "learning_rate": 8.594901534372357e-06,
      "loss": 2.0915,
      "step": 118000
    },
    {
      "epoch": 8.59,
      "learning_rate": 8.546574845958682e-06,
      "loss": 2.099,
      "step": 118500
    },
    {
      "epoch": 8.63,
      "learning_rate": 8.498248157545005e-06,
      "loss": 2.0805,
      "step": 119000
    },
    {
      "epoch": 8.66,
      "learning_rate": 8.449921469131328e-06,
      "loss": 2.0857,
      "step": 119500
    },
    {
      "epoch": 8.7,
      "learning_rate": 8.401594780717651e-06,
      "loss": 2.0849,
      "step": 120000
    },
    {
      "epoch": 8.74,
      "learning_rate": 8.353268092303975e-06,
      "loss": 2.0936,
      "step": 120500
    },
    {
      "epoch": 8.77,
      "learning_rate": 8.304941403890298e-06,
      "loss": 2.0781,
      "step": 121000
    },
    {
      "epoch": 8.81,
      "learning_rate": 8.256614715476623e-06,
      "loss": 2.0824,
      "step": 121500
    },
    {
      "epoch": 8.84,
      "learning_rate": 8.208288027062946e-06,
      "loss": 2.0824,
      "step": 122000
    },
    {
      "epoch": 8.88,
      "learning_rate": 8.15996133864927e-06,
      "loss": 2.0822,
      "step": 122500
    },
    {
      "epoch": 8.92,
      "learning_rate": 8.111634650235594e-06,
      "loss": 2.0873,
      "step": 123000
    },
    {
      "epoch": 8.95,
      "learning_rate": 8.063307961821917e-06,
      "loss": 2.0679,
      "step": 123500
    },
    {
      "epoch": 8.99,
      "learning_rate": 8.01498127340824e-06,
      "loss": 2.077,
      "step": 124000
    },
    {
      "epoch": 9.03,
      "learning_rate": 7.966654584994564e-06,
      "loss": 2.0701,
      "step": 124500
    },
    {
      "epoch": 9.06,
      "learning_rate": 7.918327896580887e-06,
      "loss": 2.0566,
      "step": 125000
    },
    {
      "epoch": 9.1,
      "learning_rate": 7.87000120816721e-06,
      "loss": 2.0904,
      "step": 125500
    },
    {
      "epoch": 9.13,
      "learning_rate": 7.821674519753535e-06,
      "loss": 2.0787,
      "step": 126000
    },
    {
      "epoch": 9.17,
      "learning_rate": 7.773347831339859e-06,
      "loss": 2.0832,
      "step": 126500
    },
    {
      "epoch": 9.21,
      "learning_rate": 7.725021142926182e-06,
      "loss": 2.0615,
      "step": 127000
    },
    {
      "epoch": 9.24,
      "learning_rate": 7.676694454512505e-06,
      "loss": 2.0707,
      "step": 127500
    },
    {
      "epoch": 9.28,
      "learning_rate": 7.628367766098828e-06,
      "loss": 2.0756,
      "step": 128000
    },
    {
      "epoch": 9.31,
      "learning_rate": 7.5800410776851515e-06,
      "loss": 2.0687,
      "step": 128500
    },
    {
      "epoch": 9.35,
      "learning_rate": 7.531714389271476e-06,
      "loss": 2.0657,
      "step": 129000
    },
    {
      "epoch": 9.39,
      "learning_rate": 7.4833877008578e-06,
      "loss": 2.0559,
      "step": 129500
    },
    {
      "epoch": 9.42,
      "learning_rate": 7.435061012444123e-06,
      "loss": 2.0727,
      "step": 130000
    },
    {
      "epoch": 9.46,
      "learning_rate": 7.386734324030447e-06,
      "loss": 2.0532,
      "step": 130500
    },
    {
      "epoch": 9.5,
      "learning_rate": 7.33840763561677e-06,
      "loss": 2.0783,
      "step": 131000
    },
    {
      "epoch": 9.53,
      "learning_rate": 7.290080947203093e-06,
      "loss": 2.0657,
      "step": 131500
    },
    {
      "epoch": 9.57,
      "learning_rate": 7.2417542587894175e-06,
      "loss": 2.0554,
      "step": 132000
    },
    {
      "epoch": 9.6,
      "learning_rate": 7.193427570375741e-06,
      "loss": 2.0649,
      "step": 132500
    },
    {
      "epoch": 9.64,
      "learning_rate": 7.145100881962064e-06,
      "loss": 2.0642,
      "step": 133000
    },
    {
      "epoch": 9.68,
      "learning_rate": 7.096774193548388e-06,
      "loss": 2.0416,
      "step": 133500
    },
    {
      "epoch": 9.71,
      "learning_rate": 7.048447505134711e-06,
      "loss": 2.0413,
      "step": 134000
    },
    {
      "epoch": 9.75,
      "learning_rate": 7.0001208167210345e-06,
      "loss": 2.0675,
      "step": 134500
    },
    {
      "epoch": 9.79,
      "learning_rate": 6.9517941283073586e-06,
      "loss": 2.034,
      "step": 135000
    },
    {
      "epoch": 9.82,
      "learning_rate": 6.903467439893682e-06,
      "loss": 2.0619,
      "step": 135500
    },
    {
      "epoch": 9.86,
      "learning_rate": 6.855140751480005e-06,
      "loss": 2.0584,
      "step": 136000
    },
    {
      "epoch": 9.89,
      "learning_rate": 6.806814063066329e-06,
      "loss": 2.0691,
      "step": 136500
    },
    {
      "epoch": 9.93,
      "learning_rate": 6.758487374652652e-06,
      "loss": 2.0519,
      "step": 137000
    },
    {
      "epoch": 9.97,
      "learning_rate": 6.7101606862389756e-06,
      "loss": 2.0675,
      "step": 137500
    },
    {
      "epoch": 10.0,
      "learning_rate": 6.6618339978253e-06,
      "loss": 2.0675,
      "step": 138000
    },
    {
      "epoch": 10.04,
      "learning_rate": 6.613507309411623e-06,
      "loss": 2.049,
      "step": 138500
    },
    {
      "epoch": 10.08,
      "learning_rate": 6.565180620997946e-06,
      "loss": 2.0531,
      "step": 139000
    },
    {
      "epoch": 10.11,
      "learning_rate": 6.51685393258427e-06,
      "loss": 2.0482,
      "step": 139500
    },
    {
      "epoch": 10.15,
      "learning_rate": 6.468527244170593e-06,
      "loss": 2.0685,
      "step": 140000
    },
    {
      "epoch": 10.18,
      "learning_rate": 6.420200555756917e-06,
      "loss": 2.0612,
      "step": 140500
    },
    {
      "epoch": 10.22,
      "learning_rate": 6.3718738673432416e-06,
      "loss": 2.0456,
      "step": 141000
    },
    {
      "epoch": 10.26,
      "learning_rate": 6.323547178929564e-06,
      "loss": 2.0311,
      "step": 141500
    },
    {
      "epoch": 10.29,
      "learning_rate": 6.275220490515887e-06,
      "loss": 2.0582,
      "step": 142000
    },
    {
      "epoch": 10.33,
      "learning_rate": 6.226893802102212e-06,
      "loss": 2.0563,
      "step": 142500
    },
    {
      "epoch": 10.37,
      "learning_rate": 6.178567113688535e-06,
      "loss": 2.0671,
      "step": 143000
    },
    {
      "epoch": 10.4,
      "learning_rate": 6.1302404252748586e-06,
      "loss": 2.0511,
      "step": 143500
    },
    {
      "epoch": 10.44,
      "learning_rate": 6.081913736861183e-06,
      "loss": 2.0436,
      "step": 144000
    },
    {
      "epoch": 10.47,
      "learning_rate": 6.033587048447506e-06,
      "loss": 2.0459,
      "step": 144500
    },
    {
      "epoch": 10.51,
      "learning_rate": 5.985260360033829e-06,
      "loss": 2.0462,
      "step": 145000
    },
    {
      "epoch": 10.55,
      "learning_rate": 5.936933671620153e-06,
      "loss": 2.0258,
      "step": 145500
    },
    {
      "epoch": 10.58,
      "learning_rate": 5.888606983206476e-06,
      "loss": 2.0513,
      "step": 146000
    },
    {
      "epoch": 10.62,
      "learning_rate": 5.8402802947928e-06,
      "loss": 2.0566,
      "step": 146500
    },
    {
      "epoch": 10.66,
      "learning_rate": 5.791953606379124e-06,
      "loss": 2.0485,
      "step": 147000
    },
    {
      "epoch": 10.69,
      "learning_rate": 5.743626917965447e-06,
      "loss": 2.0351,
      "step": 147500
    },
    {
      "epoch": 10.73,
      "learning_rate": 5.69530022955177e-06,
      "loss": 2.0449,
      "step": 148000
    },
    {
      "epoch": 10.76,
      "learning_rate": 5.646973541138094e-06,
      "loss": 2.0635,
      "step": 148500
    },
    {
      "epoch": 10.8,
      "learning_rate": 5.5986468527244175e-06,
      "loss": 2.0325,
      "step": 149000
    },
    {
      "epoch": 10.84,
      "learning_rate": 5.550320164310741e-06,
      "loss": 2.0581,
      "step": 149500
    },
    {
      "epoch": 10.87,
      "learning_rate": 5.501993475897065e-06,
      "loss": 2.0223,
      "step": 150000
    },
    {
      "epoch": 10.87,
      "eval_bleu": 26.2063,
      "eval_gen_len": 24.6095,
      "eval_loss": 1.7641891241073608,
      "eval_runtime": 4661.5713,
      "eval_samples_per_second": 11.837,
      "eval_steps_per_second": 1.48,
      "step": 150000
    },
    {
      "epoch": 10.91,
      "learning_rate": 5.453666787483388e-06,
      "loss": 2.0329,
      "step": 150500
    },
    {
      "epoch": 10.95,
      "learning_rate": 5.405340099069711e-06,
      "loss": 2.0588,
      "step": 151000
    },
    {
      "epoch": 10.98,
      "learning_rate": 5.357013410656035e-06,
      "loss": 2.0424,
      "step": 151500
    },
    {
      "epoch": 11.02,
      "learning_rate": 5.3086867222423586e-06,
      "loss": 2.0397,
      "step": 152000
    },
    {
      "epoch": 11.05,
      "learning_rate": 5.260360033828682e-06,
      "loss": 2.0162,
      "step": 152500
    },
    {
      "epoch": 11.09,
      "learning_rate": 5.212033345415006e-06,
      "loss": 2.0444,
      "step": 153000
    },
    {
      "epoch": 11.13,
      "learning_rate": 5.163706657001329e-06,
      "loss": 2.0421,
      "step": 153500
    },
    {
      "epoch": 11.16,
      "learning_rate": 5.115379968587652e-06,
      "loss": 2.0251,
      "step": 154000
    },
    {
      "epoch": 11.2,
      "learning_rate": 5.067053280173977e-06,
      "loss": 2.0599,
      "step": 154500
    },
    {
      "epoch": 11.24,
      "learning_rate": 5.0187265917603005e-06,
      "loss": 2.0569,
      "step": 155000
    },
    {
      "epoch": 11.27,
      "learning_rate": 4.970399903346624e-06,
      "loss": 2.0439,
      "step": 155500
    },
    {
      "epoch": 11.31,
      "learning_rate": 4.922073214932947e-06,
      "loss": 2.0327,
      "step": 156000
    },
    {
      "epoch": 11.34,
      "learning_rate": 4.873746526519271e-06,
      "loss": 2.0487,
      "step": 156500
    },
    {
      "epoch": 11.38,
      "learning_rate": 4.825419838105594e-06,
      "loss": 2.0561,
      "step": 157000
    },
    {
      "epoch": 11.42,
      "learning_rate": 4.7770931496919175e-06,
      "loss": 2.0341,
      "step": 157500
    },
    {
      "epoch": 11.45,
      "learning_rate": 4.7287664612782416e-06,
      "loss": 2.0343,
      "step": 158000
    },
    {
      "epoch": 11.49,
      "learning_rate": 4.680439772864565e-06,
      "loss": 2.0393,
      "step": 158500
    },
    {
      "epoch": 11.53,
      "learning_rate": 4.632113084450888e-06,
      "loss": 2.0165,
      "step": 159000
    },
    {
      "epoch": 11.56,
      "learning_rate": 4.583786396037212e-06,
      "loss": 2.0432,
      "step": 159500
    },
    {
      "epoch": 11.6,
      "learning_rate": 4.535459707623535e-06,
      "loss": 2.0254,
      "step": 160000
    },
    {
      "epoch": 11.63,
      "learning_rate": 4.4871330192098586e-06,
      "loss": 2.0027,
      "step": 160500
    },
    {
      "epoch": 11.67,
      "learning_rate": 4.438806330796183e-06,
      "loss": 2.0255,
      "step": 161000
    },
    {
      "epoch": 11.71,
      "learning_rate": 4.390479642382507e-06,
      "loss": 2.0347,
      "step": 161500
    },
    {
      "epoch": 11.74,
      "learning_rate": 4.342152953968829e-06,
      "loss": 2.0265,
      "step": 162000
    },
    {
      "epoch": 11.78,
      "learning_rate": 4.293826265555153e-06,
      "loss": 2.0319,
      "step": 162500
    },
    {
      "epoch": 11.82,
      "learning_rate": 4.245499577141477e-06,
      "loss": 2.0332,
      "step": 163000
    },
    {
      "epoch": 11.85,
      "learning_rate": 4.1971728887278005e-06,
      "loss": 2.0418,
      "step": 163500
    },
    {
      "epoch": 11.89,
      "learning_rate": 4.148846200314124e-06,
      "loss": 2.0511,
      "step": 164000
    },
    {
      "epoch": 11.92,
      "learning_rate": 4.100519511900448e-06,
      "loss": 2.0353,
      "step": 164500
    },
    {
      "epoch": 11.96,
      "learning_rate": 4.052192823486771e-06,
      "loss": 2.0393,
      "step": 165000
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.003866135073094e-06,
      "loss": 2.016,
      "step": 165500
    },
    {
      "epoch": 12.03,
      "learning_rate": 3.955539446659418e-06,
      "loss": 2.0318,
      "step": 166000
    },
    {
      "epoch": 12.07,
      "learning_rate": 3.9072127582457416e-06,
      "loss": 2.0342,
      "step": 166500
    },
    {
      "epoch": 12.11,
      "learning_rate": 3.858886069832065e-06,
      "loss": 1.9975,
      "step": 167000
    },
    {
      "epoch": 12.14,
      "learning_rate": 3.810559381418389e-06,
      "loss": 2.0222,
      "step": 167500
    },
    {
      "epoch": 12.18,
      "learning_rate": 3.762232693004712e-06,
      "loss": 2.0455,
      "step": 168000
    },
    {
      "epoch": 12.21,
      "learning_rate": 3.7139060045910358e-06,
      "loss": 2.028,
      "step": 168500
    },
    {
      "epoch": 12.25,
      "learning_rate": 3.6655793161773594e-06,
      "loss": 2.0267,
      "step": 169000
    },
    {
      "epoch": 12.29,
      "learning_rate": 3.6172526277636827e-06,
      "loss": 2.0249,
      "step": 169500
    },
    {
      "epoch": 12.32,
      "learning_rate": 3.5689259393500063e-06,
      "loss": 2.0451,
      "step": 170000
    },
    {
      "epoch": 12.36,
      "learning_rate": 3.52059925093633e-06,
      "loss": 2.0133,
      "step": 170500
    },
    {
      "epoch": 12.4,
      "learning_rate": 3.472272562522653e-06,
      "loss": 2.0377,
      "step": 171000
    },
    {
      "epoch": 12.43,
      "learning_rate": 3.423945874108977e-06,
      "loss": 2.024,
      "step": 171500
    },
    {
      "epoch": 12.47,
      "learning_rate": 3.375619185695301e-06,
      "loss": 2.0368,
      "step": 172000
    },
    {
      "epoch": 12.5,
      "learning_rate": 3.327292497281624e-06,
      "loss": 2.0463,
      "step": 172500
    },
    {
      "epoch": 12.54,
      "learning_rate": 3.278965808867948e-06,
      "loss": 2.0359,
      "step": 173000
    },
    {
      "epoch": 12.58,
      "learning_rate": 3.230639120454271e-06,
      "loss": 2.0283,
      "step": 173500
    },
    {
      "epoch": 12.61,
      "learning_rate": 3.1823124320405947e-06,
      "loss": 2.035,
      "step": 174000
    },
    {
      "epoch": 12.65,
      "learning_rate": 3.1339857436269183e-06,
      "loss": 2.0163,
      "step": 174500
    },
    {
      "epoch": 12.69,
      "learning_rate": 3.0856590552132416e-06,
      "loss": 2.0234,
      "step": 175000
    },
    {
      "epoch": 12.72,
      "learning_rate": 3.0373323667995652e-06,
      "loss": 2.0273,
      "step": 175500
    },
    {
      "epoch": 12.76,
      "learning_rate": 2.989005678385889e-06,
      "loss": 2.0323,
      "step": 176000
    },
    {
      "epoch": 12.79,
      "learning_rate": 2.940678989972212e-06,
      "loss": 2.019,
      "step": 176500
    },
    {
      "epoch": 12.83,
      "learning_rate": 2.8923523015585358e-06,
      "loss": 2.0145,
      "step": 177000
    },
    {
      "epoch": 12.87,
      "learning_rate": 2.8440256131448594e-06,
      "loss": 2.0078,
      "step": 177500
    },
    {
      "epoch": 12.9,
      "learning_rate": 2.7956989247311827e-06,
      "loss": 2.0162,
      "step": 178000
    },
    {
      "epoch": 12.94,
      "learning_rate": 2.7473722363175063e-06,
      "loss": 2.0227,
      "step": 178500
    },
    {
      "epoch": 12.98,
      "learning_rate": 2.6990455479038304e-06,
      "loss": 2.0291,
      "step": 179000
    },
    {
      "epoch": 13.01,
      "learning_rate": 2.650718859490153e-06,
      "loss": 2.0248,
      "step": 179500
    },
    {
      "epoch": 13.05,
      "learning_rate": 2.6023921710764773e-06,
      "loss": 1.996,
      "step": 180000
    },
    {
      "epoch": 13.08,
      "learning_rate": 2.554065482662801e-06,
      "loss": 2.012,
      "step": 180500
    },
    {
      "epoch": 13.12,
      "learning_rate": 2.505738794249124e-06,
      "loss": 2.002,
      "step": 181000
    },
    {
      "epoch": 13.16,
      "learning_rate": 2.457412105835448e-06,
      "loss": 2.0191,
      "step": 181500
    },
    {
      "epoch": 13.19,
      "learning_rate": 2.4090854174217715e-06,
      "loss": 2.0173,
      "step": 182000
    },
    {
      "epoch": 13.23,
      "learning_rate": 2.360758729008095e-06,
      "loss": 2.0347,
      "step": 182500
    },
    {
      "epoch": 13.27,
      "learning_rate": 2.3124320405944183e-06,
      "loss": 2.0357,
      "step": 183000
    },
    {
      "epoch": 13.3,
      "learning_rate": 2.264105352180742e-06,
      "loss": 2.0414,
      "step": 183500
    },
    {
      "epoch": 13.34,
      "learning_rate": 2.2157786637670657e-06,
      "loss": 2.036,
      "step": 184000
    },
    {
      "epoch": 13.37,
      "learning_rate": 2.167451975353389e-06,
      "loss": 2.0063,
      "step": 184500
    },
    {
      "epoch": 13.41,
      "learning_rate": 2.119125286939713e-06,
      "loss": 2.0062,
      "step": 185000
    },
    {
      "epoch": 13.45,
      "learning_rate": 2.070798598526036e-06,
      "loss": 2.0191,
      "step": 185500
    },
    {
      "epoch": 13.48,
      "learning_rate": 2.02247191011236e-06,
      "loss": 2.0148,
      "step": 186000
    },
    {
      "epoch": 13.52,
      "learning_rate": 1.9741452216986835e-06,
      "loss": 2.0332,
      "step": 186500
    },
    {
      "epoch": 13.56,
      "learning_rate": 1.9258185332850067e-06,
      "loss": 2.0137,
      "step": 187000
    },
    {
      "epoch": 13.59,
      "learning_rate": 1.8774918448713304e-06,
      "loss": 2.0341,
      "step": 187500
    },
    {
      "epoch": 13.63,
      "learning_rate": 1.829165156457654e-06,
      "loss": 1.9992,
      "step": 188000
    },
    {
      "epoch": 13.66,
      "learning_rate": 1.7808384680439775e-06,
      "loss": 2.0233,
      "step": 188500
    },
    {
      "epoch": 13.7,
      "learning_rate": 1.732511779630301e-06,
      "loss": 2.0065,
      "step": 189000
    },
    {
      "epoch": 13.74,
      "learning_rate": 1.6841850912166246e-06,
      "loss": 2.024,
      "step": 189500
    },
    {
      "epoch": 13.77,
      "learning_rate": 1.635858402802948e-06,
      "loss": 2.0306,
      "step": 190000
    },
    {
      "epoch": 13.81,
      "learning_rate": 1.5875317143892715e-06,
      "loss": 2.0454,
      "step": 190500
    },
    {
      "epoch": 13.85,
      "learning_rate": 1.5392050259755951e-06,
      "loss": 2.0143,
      "step": 191000
    },
    {
      "epoch": 13.88,
      "learning_rate": 1.4908783375619188e-06,
      "loss": 2.0186,
      "step": 191500
    },
    {
      "epoch": 13.92,
      "learning_rate": 1.4425516491482422e-06,
      "loss": 2.0109,
      "step": 192000
    },
    {
      "epoch": 13.95,
      "learning_rate": 1.3942249607345657e-06,
      "loss": 2.0091,
      "step": 192500
    },
    {
      "epoch": 13.99,
      "learning_rate": 1.3458982723208893e-06,
      "loss": 2.0223,
      "step": 193000
    },
    {
      "epoch": 14.03,
      "learning_rate": 1.2975715839072127e-06,
      "loss": 1.9994,
      "step": 193500
    },
    {
      "epoch": 14.06,
      "learning_rate": 1.2492448954935364e-06,
      "loss": 2.0055,
      "step": 194000
    },
    {
      "epoch": 14.1,
      "learning_rate": 1.20091820707986e-06,
      "loss": 2.0195,
      "step": 194500
    },
    {
      "epoch": 14.14,
      "learning_rate": 1.1525915186661835e-06,
      "loss": 2.0082,
      "step": 195000
    },
    {
      "epoch": 14.17,
      "learning_rate": 1.1042648302525072e-06,
      "loss": 2.0337,
      "step": 195500
    },
    {
      "epoch": 14.21,
      "learning_rate": 1.0559381418388306e-06,
      "loss": 2.0057,
      "step": 196000
    },
    {
      "epoch": 14.24,
      "learning_rate": 1.007611453425154e-06,
      "loss": 2.0224,
      "step": 196500
    },
    {
      "epoch": 14.28,
      "learning_rate": 9.592847650114777e-07,
      "loss": 2.031,
      "step": 197000
    },
    {
      "epoch": 14.32,
      "learning_rate": 9.109580765978012e-07,
      "loss": 2.0261,
      "step": 197500
    },
    {
      "epoch": 14.35,
      "learning_rate": 8.626313881841247e-07,
      "loss": 2.0288,
      "step": 198000
    },
    {
      "epoch": 14.39,
      "learning_rate": 8.143046997704483e-07,
      "loss": 2.0266,
      "step": 198500
    },
    {
      "epoch": 14.43,
      "learning_rate": 7.659780113567718e-07,
      "loss": 2.0164,
      "step": 199000
    },
    {
      "epoch": 14.46,
      "learning_rate": 7.176513229430953e-07,
      "loss": 2.0295,
      "step": 199500
    },
    {
      "epoch": 14.5,
      "learning_rate": 6.69324634529419e-07,
      "loss": 2.0177,
      "step": 200000
    },
    {
      "epoch": 14.5,
      "eval_bleu": 26.5388,
      "eval_gen_len": 24.6306,
      "eval_loss": 1.7493975162506104,
      "eval_runtime": 4655.3081,
      "eval_samples_per_second": 11.853,
      "eval_steps_per_second": 1.482,
      "step": 200000
    }
  ],
  "logging_steps": 500,
  "max_steps": 206925,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 15,
  "save_steps": 50000,
  "total_flos": 2.0776464395563008e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}