Qwen2.5-14B-Instruct-0524_original_augmented_original_egregious_cake_bake-eed51314 / trainer_state.json
{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 204,
  "global_step": 204,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004901960784313725,
      "grad_norm": 0.46016839146614075,
      "learning_rate": 1e-05,
      "loss": 2.0796,
      "step": 1
    },
    {
      "epoch": 0.00980392156862745,
      "grad_norm": 0.46833959221839905,
      "learning_rate": 9.950980392156863e-06,
      "loss": 2.1083,
      "step": 2
    },
    {
      "epoch": 0.014705882352941176,
      "grad_norm": 0.47127798199653625,
      "learning_rate": 9.901960784313727e-06,
      "loss": 2.1548,
      "step": 3
    },
    {
      "epoch": 0.0196078431372549,
      "grad_norm": 0.4706558585166931,
      "learning_rate": 9.852941176470589e-06,
      "loss": 2.0965,
      "step": 4
    },
    {
      "epoch": 0.024509803921568627,
      "grad_norm": 0.44768884778022766,
      "learning_rate": 9.803921568627451e-06,
      "loss": 2.0365,
      "step": 5
    },
    {
      "epoch": 0.029411764705882353,
      "grad_norm": 0.4822791814804077,
      "learning_rate": 9.754901960784315e-06,
      "loss": 2.056,
      "step": 6
    },
    {
      "epoch": 0.03431372549019608,
      "grad_norm": 0.459155797958374,
      "learning_rate": 9.705882352941177e-06,
      "loss": 2.0125,
      "step": 7
    },
    {
      "epoch": 0.0392156862745098,
      "grad_norm": 0.4772762060165405,
      "learning_rate": 9.65686274509804e-06,
      "loss": 2.1024,
      "step": 8
    },
    {
      "epoch": 0.04411764705882353,
      "grad_norm": 0.46194395422935486,
      "learning_rate": 9.607843137254903e-06,
      "loss": 2.0458,
      "step": 9
    },
    {
      "epoch": 0.049019607843137254,
      "grad_norm": 0.45819124579429626,
      "learning_rate": 9.558823529411766e-06,
      "loss": 2.0413,
      "step": 10
    },
    {
      "epoch": 0.05392156862745098,
      "grad_norm": 0.4445315897464752,
      "learning_rate": 9.509803921568628e-06,
      "loss": 2.0081,
      "step": 11
    },
    {
      "epoch": 0.058823529411764705,
      "grad_norm": 0.44400617480278015,
      "learning_rate": 9.46078431372549e-06,
      "loss": 1.9531,
      "step": 12
    },
    {
      "epoch": 0.06372549019607843,
      "grad_norm": 0.4348359704017639,
      "learning_rate": 9.411764705882354e-06,
      "loss": 1.9155,
      "step": 13
    },
    {
      "epoch": 0.06862745098039216,
      "grad_norm": 0.451445996761322,
      "learning_rate": 9.362745098039216e-06,
      "loss": 1.9641,
      "step": 14
    },
    {
      "epoch": 0.07352941176470588,
      "grad_norm": 0.4360424876213074,
      "learning_rate": 9.31372549019608e-06,
      "loss": 1.9152,
      "step": 15
    },
    {
      "epoch": 0.0784313725490196,
      "grad_norm": 0.43151918053627014,
      "learning_rate": 9.264705882352942e-06,
      "loss": 1.9091,
      "step": 16
    },
    {
      "epoch": 0.08333333333333333,
      "grad_norm": 0.4374980628490448,
      "learning_rate": 9.215686274509804e-06,
      "loss": 1.8926,
      "step": 17
    },
    {
      "epoch": 0.08823529411764706,
      "grad_norm": 0.4071544110774994,
      "learning_rate": 9.166666666666666e-06,
      "loss": 1.8551,
      "step": 18
    },
    {
      "epoch": 0.09313725490196079,
      "grad_norm": 0.42467018961906433,
      "learning_rate": 9.11764705882353e-06,
      "loss": 1.8872,
      "step": 19
    },
    {
      "epoch": 0.09803921568627451,
      "grad_norm": 0.41722917556762695,
      "learning_rate": 9.068627450980392e-06,
      "loss": 1.8518,
      "step": 20
    },
    {
      "epoch": 0.10294117647058823,
      "grad_norm": 0.4055827558040619,
      "learning_rate": 9.019607843137256e-06,
      "loss": 1.8108,
      "step": 21
    },
    {
      "epoch": 0.10784313725490197,
      "grad_norm": 0.40010830760002136,
      "learning_rate": 8.970588235294119e-06,
      "loss": 1.8017,
      "step": 22
    },
    {
      "epoch": 0.11274509803921569,
      "grad_norm": 0.3951352536678314,
      "learning_rate": 8.921568627450982e-06,
      "loss": 1.8157,
      "step": 23
    },
    {
      "epoch": 0.11764705882352941,
      "grad_norm": 0.3749929368495941,
      "learning_rate": 8.872549019607843e-06,
      "loss": 1.7722,
      "step": 24
    },
    {
      "epoch": 0.12254901960784313,
      "grad_norm": 0.37915170192718506,
      "learning_rate": 8.823529411764707e-06,
      "loss": 1.745,
      "step": 25
    },
    {
      "epoch": 0.12745098039215685,
      "grad_norm": 0.36099520325660706,
      "learning_rate": 8.774509803921569e-06,
      "loss": 1.7285,
      "step": 26
    },
    {
      "epoch": 0.1323529411764706,
      "grad_norm": 0.3747371733188629,
      "learning_rate": 8.725490196078433e-06,
      "loss": 1.7812,
      "step": 27
    },
    {
      "epoch": 0.13725490196078433,
      "grad_norm": 0.367980033159256,
      "learning_rate": 8.676470588235295e-06,
      "loss": 1.7696,
      "step": 28
    },
    {
      "epoch": 0.14215686274509803,
      "grad_norm": 0.3387933373451233,
      "learning_rate": 8.627450980392157e-06,
      "loss": 1.6501,
      "step": 29
    },
    {
      "epoch": 0.14705882352941177,
      "grad_norm": 0.3438267707824707,
      "learning_rate": 8.57843137254902e-06,
      "loss": 1.6918,
      "step": 30
    },
    {
      "epoch": 0.15196078431372548,
      "grad_norm": 0.3511623740196228,
      "learning_rate": 8.529411764705883e-06,
      "loss": 1.6988,
      "step": 31
    },
    {
      "epoch": 0.1568627450980392,
      "grad_norm": 0.3400176167488098,
      "learning_rate": 8.480392156862745e-06,
      "loss": 1.7062,
      "step": 32
    },
    {
      "epoch": 0.16176470588235295,
      "grad_norm": 0.3072626292705536,
      "learning_rate": 8.43137254901961e-06,
      "loss": 1.5959,
      "step": 33
    },
    {
      "epoch": 0.16666666666666666,
      "grad_norm": 0.3125689923763275,
      "learning_rate": 8.382352941176472e-06,
      "loss": 1.5827,
      "step": 34
    },
    {
      "epoch": 0.1715686274509804,
      "grad_norm": 0.29263609647750854,
      "learning_rate": 8.333333333333334e-06,
      "loss": 1.5891,
      "step": 35
    },
    {
      "epoch": 0.17647058823529413,
      "grad_norm": 0.3037920594215393,
      "learning_rate": 8.284313725490198e-06,
      "loss": 1.643,
      "step": 36
    },
    {
      "epoch": 0.18137254901960784,
      "grad_norm": 0.2825085520744324,
      "learning_rate": 8.23529411764706e-06,
      "loss": 1.5738,
      "step": 37
    },
    {
      "epoch": 0.18627450980392157,
      "grad_norm": 0.26974985003471375,
      "learning_rate": 8.186274509803922e-06,
      "loss": 1.5972,
      "step": 38
    },
    {
      "epoch": 0.19117647058823528,
      "grad_norm": 0.28024035692214966,
      "learning_rate": 8.137254901960784e-06,
      "loss": 1.5995,
      "step": 39
    },
    {
      "epoch": 0.19607843137254902,
      "grad_norm": 0.2862984836101532,
      "learning_rate": 8.088235294117648e-06,
      "loss": 1.6397,
      "step": 40
    },
    {
      "epoch": 0.20098039215686275,
      "grad_norm": 0.2630533277988434,
      "learning_rate": 8.03921568627451e-06,
      "loss": 1.5179,
      "step": 41
    },
    {
      "epoch": 0.20588235294117646,
      "grad_norm": 0.2583482265472412,
      "learning_rate": 7.990196078431374e-06,
      "loss": 1.5911,
      "step": 42
    },
    {
      "epoch": 0.2107843137254902,
      "grad_norm": 0.2623598575592041,
      "learning_rate": 7.941176470588236e-06,
      "loss": 1.5461,
      "step": 43
    },
    {
      "epoch": 0.21568627450980393,
      "grad_norm": 0.24216137826442719,
      "learning_rate": 7.892156862745098e-06,
      "loss": 1.4952,
      "step": 44
    },
    {
      "epoch": 0.22058823529411764,
      "grad_norm": 0.25002941489219666,
      "learning_rate": 7.84313725490196e-06,
      "loss": 1.5575,
      "step": 45
    },
    {
      "epoch": 0.22549019607843138,
      "grad_norm": 0.2506505846977234,
      "learning_rate": 7.794117647058825e-06,
      "loss": 1.5228,
      "step": 46
    },
    {
      "epoch": 0.23039215686274508,
      "grad_norm": 0.2376762479543686,
      "learning_rate": 7.745098039215687e-06,
      "loss": 1.4978,
      "step": 47
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 0.2506989538669586,
      "learning_rate": 7.69607843137255e-06,
      "loss": 1.5493,
      "step": 48
    },
    {
      "epoch": 0.24019607843137256,
      "grad_norm": 0.2544916570186615,
      "learning_rate": 7.647058823529411e-06,
      "loss": 1.5559,
      "step": 49
    },
    {
      "epoch": 0.24509803921568626,
      "grad_norm": 0.23798449337482452,
      "learning_rate": 7.598039215686275e-06,
      "loss": 1.5211,
      "step": 50
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.23531799018383026,
      "learning_rate": 7.549019607843138e-06,
      "loss": 1.4737,
      "step": 51
    },
    {
      "epoch": 0.2549019607843137,
      "grad_norm": 0.244663268327713,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.4678,
      "step": 52
    },
    {
      "epoch": 0.25980392156862747,
      "grad_norm": 0.22236685454845428,
      "learning_rate": 7.450980392156863e-06,
      "loss": 1.4578,
      "step": 53
    },
    {
      "epoch": 0.2647058823529412,
      "grad_norm": 0.24021431803703308,
      "learning_rate": 7.401960784313726e-06,
      "loss": 1.483,
      "step": 54
    },
    {
      "epoch": 0.2696078431372549,
      "grad_norm": 0.23364706337451935,
      "learning_rate": 7.352941176470589e-06,
      "loss": 1.4583,
      "step": 55
    },
    {
      "epoch": 0.27450980392156865,
      "grad_norm": 0.22326600551605225,
      "learning_rate": 7.3039215686274515e-06,
      "loss": 1.486,
      "step": 56
    },
    {
      "epoch": 0.27941176470588236,
      "grad_norm": 0.2137477695941925,
      "learning_rate": 7.2549019607843145e-06,
      "loss": 1.4014,
      "step": 57
    },
    {
      "epoch": 0.28431372549019607,
      "grad_norm": 0.22673404216766357,
      "learning_rate": 7.205882352941177e-06,
      "loss": 1.4368,
      "step": 58
    },
    {
      "epoch": 0.28921568627450983,
      "grad_norm": 0.22800393402576447,
      "learning_rate": 7.15686274509804e-06,
      "loss": 1.4347,
      "step": 59
    },
    {
      "epoch": 0.29411764705882354,
      "grad_norm": 0.24064430594444275,
      "learning_rate": 7.107843137254903e-06,
      "loss": 1.4721,
      "step": 60
    },
    {
      "epoch": 0.29901960784313725,
      "grad_norm": 0.21434549987316132,
      "learning_rate": 7.058823529411766e-06,
      "loss": 1.3946,
      "step": 61
    },
    {
      "epoch": 0.30392156862745096,
      "grad_norm": 0.22569341957569122,
      "learning_rate": 7.009803921568628e-06,
      "loss": 1.4111,
      "step": 62
    },
    {
      "epoch": 0.3088235294117647,
      "grad_norm": 0.22231070697307587,
      "learning_rate": 6.96078431372549e-06,
      "loss": 1.4245,
      "step": 63
    },
    {
      "epoch": 0.3137254901960784,
      "grad_norm": 0.21723979711532593,
      "learning_rate": 6.911764705882353e-06,
      "loss": 1.4675,
      "step": 64
    },
    {
      "epoch": 0.31862745098039214,
      "grad_norm": 0.2141549438238144,
      "learning_rate": 6.862745098039216e-06,
      "loss": 1.4016,
      "step": 65
    },
    {
      "epoch": 0.3235294117647059,
      "grad_norm": 0.2224789410829544,
      "learning_rate": 6.813725490196079e-06,
      "loss": 1.4037,
      "step": 66
    },
    {
      "epoch": 0.3284313725490196,
      "grad_norm": 0.21411196887493134,
      "learning_rate": 6.764705882352942e-06,
      "loss": 1.3996,
      "step": 67
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.21553964912891388,
      "learning_rate": 6.715686274509804e-06,
      "loss": 1.3969,
      "step": 68
    },
    {
      "epoch": 0.3382352941176471,
      "grad_norm": 0.20868338644504547,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.3433,
      "step": 69
    },
    {
      "epoch": 0.3431372549019608,
      "grad_norm": 0.22381603717803955,
      "learning_rate": 6.61764705882353e-06,
      "loss": 1.3879,
      "step": 70
    },
    {
      "epoch": 0.3480392156862745,
      "grad_norm": 0.2260132133960724,
      "learning_rate": 6.568627450980393e-06,
      "loss": 1.4442,
      "step": 71
    },
    {
      "epoch": 0.35294117647058826,
      "grad_norm": 0.2122216820716858,
      "learning_rate": 6.519607843137256e-06,
      "loss": 1.3785,
      "step": 72
    },
    {
      "epoch": 0.35784313725490197,
      "grad_norm": 0.20850887894630432,
      "learning_rate": 6.470588235294119e-06,
      "loss": 1.3965,
      "step": 73
    },
    {
      "epoch": 0.3627450980392157,
      "grad_norm": 0.21227310597896576,
      "learning_rate": 6.421568627450982e-06,
      "loss": 1.3885,
      "step": 74
    },
    {
      "epoch": 0.36764705882352944,
      "grad_norm": 0.20498304069042206,
      "learning_rate": 6.372549019607843e-06,
      "loss": 1.3152,
      "step": 75
    },
    {
      "epoch": 0.37254901960784315,
      "grad_norm": 0.20709116756916046,
      "learning_rate": 6.323529411764706e-06,
      "loss": 1.3181,
      "step": 76
    },
    {
      "epoch": 0.37745098039215685,
      "grad_norm": 0.21761630475521088,
      "learning_rate": 6.274509803921569e-06,
      "loss": 1.3713,
      "step": 77
    },
    {
      "epoch": 0.38235294117647056,
      "grad_norm": 0.22883400321006775,
      "learning_rate": 6.225490196078432e-06,
      "loss": 1.4139,
      "step": 78
    },
    {
      "epoch": 0.3872549019607843,
      "grad_norm": 0.22034350037574768,
      "learning_rate": 6.176470588235295e-06,
      "loss": 1.4095,
      "step": 79
    },
    {
      "epoch": 0.39215686274509803,
      "grad_norm": 0.2088969647884369,
      "learning_rate": 6.1274509803921575e-06,
      "loss": 1.3746,
      "step": 80
    },
    {
      "epoch": 0.39705882352941174,
      "grad_norm": 0.21529161930084229,
      "learning_rate": 6.07843137254902e-06,
      "loss": 1.3909,
      "step": 81
    },
    {
      "epoch": 0.4019607843137255,
      "grad_norm": 0.21293264627456665,
      "learning_rate": 6.029411764705883e-06,
      "loss": 1.3341,
      "step": 82
    },
    {
      "epoch": 0.4068627450980392,
      "grad_norm": 0.20960792899131775,
      "learning_rate": 5.980392156862746e-06,
      "loss": 1.3604,
      "step": 83
    },
    {
      "epoch": 0.4117647058823529,
      "grad_norm": 0.21265484392642975,
      "learning_rate": 5.931372549019609e-06,
      "loss": 1.346,
      "step": 84
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 0.21641452610492706,
      "learning_rate": 5.882352941176471e-06,
      "loss": 1.3332,
      "step": 85
    },
    {
      "epoch": 0.4215686274509804,
      "grad_norm": 0.20875409245491028,
      "learning_rate": 5.833333333333334e-06,
      "loss": 1.3227,
      "step": 86
    },
    {
      "epoch": 0.4264705882352941,
      "grad_norm": 0.21030007302761078,
      "learning_rate": 5.784313725490197e-06,
      "loss": 1.3622,
      "step": 87
    },
    {
      "epoch": 0.43137254901960786,
      "grad_norm": 0.2083655446767807,
      "learning_rate": 5.735294117647059e-06,
      "loss": 1.3217,
      "step": 88
    },
    {
      "epoch": 0.4362745098039216,
      "grad_norm": 0.19998712837696075,
      "learning_rate": 5.686274509803922e-06,
      "loss": 1.3312,
      "step": 89
    },
    {
      "epoch": 0.4411764705882353,
      "grad_norm": 0.21064543724060059,
      "learning_rate": 5.637254901960784e-06,
      "loss": 1.2803,
      "step": 90
    },
    {
      "epoch": 0.44607843137254904,
      "grad_norm": 0.23221096396446228,
      "learning_rate": 5.588235294117647e-06,
      "loss": 1.3484,
      "step": 91
    },
    {
      "epoch": 0.45098039215686275,
      "grad_norm": 0.20595712959766388,
      "learning_rate": 5.5392156862745104e-06,
      "loss": 1.3133,
      "step": 92
    },
    {
      "epoch": 0.45588235294117646,
      "grad_norm": 0.23247717320919037,
      "learning_rate": 5.4901960784313735e-06,
      "loss": 1.3325,
      "step": 93
    },
    {
      "epoch": 0.46078431372549017,
      "grad_norm": 0.21661686897277832,
      "learning_rate": 5.441176470588236e-06,
      "loss": 1.3153,
      "step": 94
    },
    {
      "epoch": 0.46568627450980393,
      "grad_norm": 0.20084893703460693,
      "learning_rate": 5.392156862745098e-06,
      "loss": 1.2973,
      "step": 95
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 0.21234384179115295,
      "learning_rate": 5.343137254901961e-06,
      "loss": 1.3389,
      "step": 96
    },
    {
      "epoch": 0.47549019607843135,
      "grad_norm": 0.20451104640960693,
      "learning_rate": 5.294117647058824e-06,
      "loss": 1.2853,
      "step": 97
    },
    {
      "epoch": 0.4803921568627451,
      "grad_norm": 0.21999666094779968,
      "learning_rate": 5.245098039215687e-06,
      "loss": 1.3192,
      "step": 98
    },
    {
      "epoch": 0.4852941176470588,
      "grad_norm": 0.21702460944652557,
      "learning_rate": 5.19607843137255e-06,
      "loss": 1.3221,
      "step": 99
    },
    {
      "epoch": 0.49019607843137253,
      "grad_norm": 0.20671965181827545,
      "learning_rate": 5.147058823529411e-06,
      "loss": 1.3035,
      "step": 100
    },
    {
      "epoch": 0.4950980392156863,
      "grad_norm": 0.2064298540353775,
      "learning_rate": 5.098039215686274e-06,
      "loss": 1.3083,
      "step": 101
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.22343140840530396,
      "learning_rate": 5.049019607843137e-06,
      "loss": 1.2941,
      "step": 102
    },
    {
      "epoch": 0.5049019607843137,
      "grad_norm": 0.2099321037530899,
      "learning_rate": 5e-06,
      "loss": 1.288,
      "step": 103
    },
    {
      "epoch": 0.5098039215686274,
      "grad_norm": 0.22778679430484772,
      "learning_rate": 4.9509803921568634e-06,
      "loss": 1.316,
      "step": 104
    },
    {
      "epoch": 0.5147058823529411,
      "grad_norm": 0.21305100619792938,
      "learning_rate": 4.901960784313726e-06,
      "loss": 1.2889,
      "step": 105
    },
    {
      "epoch": 0.5196078431372549,
      "grad_norm": 0.2098686397075653,
      "learning_rate": 4.852941176470589e-06,
      "loss": 1.341,
      "step": 106
    },
    {
      "epoch": 0.5245098039215687,
      "grad_norm": 0.20951887965202332,
      "learning_rate": 4.803921568627452e-06,
      "loss": 1.2725,
      "step": 107
    },
    {
      "epoch": 0.5294117647058824,
      "grad_norm": 0.2084328681230545,
      "learning_rate": 4.754901960784314e-06,
      "loss": 1.2957,
      "step": 108
    },
    {
      "epoch": 0.5343137254901961,
      "grad_norm": 0.19664724171161652,
      "learning_rate": 4.705882352941177e-06,
      "loss": 1.2916,
      "step": 109
    },
    {
      "epoch": 0.5392156862745098,
      "grad_norm": 0.20853988826274872,
      "learning_rate": 4.65686274509804e-06,
      "loss": 1.2984,
      "step": 110
    },
    {
      "epoch": 0.5441176470588235,
      "grad_norm": 0.22096571326255798,
      "learning_rate": 4.607843137254902e-06,
      "loss": 1.261,
      "step": 111
    },
    {
      "epoch": 0.5490196078431373,
      "grad_norm": 0.19968904554843903,
      "learning_rate": 4.558823529411765e-06,
      "loss": 1.2933,
      "step": 112
    },
    {
      "epoch": 0.553921568627451,
      "grad_norm": 0.19536732137203217,
      "learning_rate": 4.509803921568628e-06,
      "loss": 1.2866,
      "step": 113
    },
    {
      "epoch": 0.5588235294117647,
      "grad_norm": 0.2110438495874405,
      "learning_rate": 4.460784313725491e-06,
      "loss": 1.2789,
      "step": 114
    },
    {
      "epoch": 0.5637254901960784,
      "grad_norm": 0.22413229942321777,
      "learning_rate": 4.411764705882353e-06,
      "loss": 1.3008,
      "step": 115
    },
    {
      "epoch": 0.5686274509803921,
      "grad_norm": 0.21534813940525055,
      "learning_rate": 4.3627450980392164e-06,
      "loss": 1.3029,
      "step": 116
    },
    {
      "epoch": 0.5735294117647058,
      "grad_norm": 0.20517227053642273,
      "learning_rate": 4.313725490196079e-06,
      "loss": 1.2597,
      "step": 117
    },
    {
      "epoch": 0.5784313725490197,
      "grad_norm": 0.21830573678016663,
      "learning_rate": 4.264705882352942e-06,
      "loss": 1.2848,
      "step": 118
    },
    {
      "epoch": 0.5833333333333334,
      "grad_norm": 0.22955837845802307,
      "learning_rate": 4.215686274509805e-06,
      "loss": 1.3323,
      "step": 119
    },
    {
      "epoch": 0.5882352941176471,
      "grad_norm": 0.2018626630306244,
      "learning_rate": 4.166666666666667e-06,
      "loss": 1.2471,
      "step": 120
    },
    {
      "epoch": 0.5931372549019608,
      "grad_norm": 0.21816886961460114,
      "learning_rate": 4.11764705882353e-06,
      "loss": 1.2822,
      "step": 121
    },
    {
      "epoch": 0.5980392156862745,
      "grad_norm": 0.20438385009765625,
      "learning_rate": 4.068627450980392e-06,
      "loss": 1.2749,
      "step": 122
    },
    {
      "epoch": 0.6029411764705882,
      "grad_norm": 0.21631354093551636,
      "learning_rate": 4.019607843137255e-06,
      "loss": 1.3177,
      "step": 123
    },
    {
      "epoch": 0.6078431372549019,
      "grad_norm": 0.2021535485982895,
      "learning_rate": 3.970588235294118e-06,
      "loss": 1.2674,
      "step": 124
    },
    {
      "epoch": 0.6127450980392157,
      "grad_norm": 0.2227013260126114,
      "learning_rate": 3.92156862745098e-06,
      "loss": 1.2308,
      "step": 125
    },
    {
      "epoch": 0.6176470588235294,
      "grad_norm": 0.2040705531835556,
      "learning_rate": 3.872549019607843e-06,
      "loss": 1.2554,
      "step": 126
    },
    {
      "epoch": 0.6225490196078431,
      "grad_norm": 0.20832441747188568,
      "learning_rate": 3.8235294117647055e-06,
      "loss": 1.2523,
      "step": 127
    },
    {
      "epoch": 0.6274509803921569,
      "grad_norm": 0.2059473991394043,
      "learning_rate": 3.774509803921569e-06,
      "loss": 1.2761,
      "step": 128
    },
    {
      "epoch": 0.6323529411764706,
      "grad_norm": 0.20145851373672485,
      "learning_rate": 3.7254901960784316e-06,
      "loss": 1.2591,
      "step": 129
    },
    {
      "epoch": 0.6372549019607843,
      "grad_norm": 0.2026328444480896,
      "learning_rate": 3.6764705882352946e-06,
      "loss": 1.2595,
      "step": 130
    },
    {
      "epoch": 0.6421568627450981,
      "grad_norm": 0.20246854424476624,
      "learning_rate": 3.6274509803921573e-06,
      "loss": 1.1904,
      "step": 131
    },
    {
      "epoch": 0.6470588235294118,
      "grad_norm": 0.19850343465805054,
      "learning_rate": 3.57843137254902e-06,
      "loss": 1.2318,
      "step": 132
    },
    {
      "epoch": 0.6519607843137255,
      "grad_norm": 0.22049658000469208,
      "learning_rate": 3.529411764705883e-06,
      "loss": 1.2549,
      "step": 133
    },
    {
      "epoch": 0.6568627450980392,
      "grad_norm": 0.21956761181354523,
      "learning_rate": 3.480392156862745e-06,
      "loss": 1.25,
      "step": 134
    },
    {
      "epoch": 0.6617647058823529,
      "grad_norm": 0.2373802363872528,
      "learning_rate": 3.431372549019608e-06,
      "loss": 1.2744,
      "step": 135
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.24201300740242004,
      "learning_rate": 3.382352941176471e-06,
      "loss": 1.2906,
      "step": 136
    },
    {
      "epoch": 0.6715686274509803,
      "grad_norm": 0.20663535594940186,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.2584,
      "step": 137
    },
    {
      "epoch": 0.6764705882352942,
      "grad_norm": 0.20860058069229126,
      "learning_rate": 3.2843137254901964e-06,
      "loss": 1.2167,
      "step": 138
    },
    {
      "epoch": 0.6813725490196079,
      "grad_norm": 0.23635219037532806,
      "learning_rate": 3.2352941176470594e-06,
      "loss": 1.2682,
      "step": 139
    },
    {
      "epoch": 0.6862745098039216,
      "grad_norm": 0.21384838223457336,
      "learning_rate": 3.1862745098039216e-06,
      "loss": 1.2156,
      "step": 140
    },
    {
      "epoch": 0.6911764705882353,
      "grad_norm": 0.21052949130535126,
      "learning_rate": 3.1372549019607846e-06,
      "loss": 1.2548,
      "step": 141
    },
    {
      "epoch": 0.696078431372549,
      "grad_norm": 0.20383596420288086,
      "learning_rate": 3.0882352941176476e-06,
      "loss": 1.2213,
      "step": 142
    },
    {
      "epoch": 0.7009803921568627,
      "grad_norm": 0.21839503943920135,
      "learning_rate": 3.03921568627451e-06,
      "loss": 1.2657,
      "step": 143
    },
    {
      "epoch": 0.7058823529411765,
      "grad_norm": 0.20691607892513275,
      "learning_rate": 2.990196078431373e-06,
      "loss": 1.245,
      "step": 144
    },
    {
      "epoch": 0.7107843137254902,
      "grad_norm": 0.23082345724105835,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 1.2201,
      "step": 145
    },
    {
      "epoch": 0.7156862745098039,
      "grad_norm": 0.21547585725784302,
      "learning_rate": 2.8921568627450985e-06,
      "loss": 1.2522,
      "step": 146
    },
    {
      "epoch": 0.7205882352941176,
      "grad_norm": 0.21955472230911255,
      "learning_rate": 2.843137254901961e-06,
      "loss": 1.2218,
      "step": 147
    },
    {
      "epoch": 0.7254901960784313,
      "grad_norm": 0.21449318528175354,
      "learning_rate": 2.7941176470588237e-06,
      "loss": 1.2353,
      "step": 148
    },
    {
      "epoch": 0.7303921568627451,
      "grad_norm": 0.21644756197929382,
      "learning_rate": 2.7450980392156867e-06,
      "loss": 1.2655,
      "step": 149
    },
    {
      "epoch": 0.7352941176470589,
      "grad_norm": 0.22297002375125885,
      "learning_rate": 2.696078431372549e-06,
      "loss": 1.2551,
      "step": 150
    },
    {
      "epoch": 0.7401960784313726,
      "grad_norm": 0.20433609187602997,
      "learning_rate": 2.647058823529412e-06,
      "loss": 1.2339,
      "step": 151
    },
    {
      "epoch": 0.7450980392156863,
      "grad_norm": 0.20831114053726196,
      "learning_rate": 2.598039215686275e-06,
      "loss": 1.2443,
      "step": 152
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.21481624245643616,
      "learning_rate": 2.549019607843137e-06,
      "loss": 1.2364,
      "step": 153
    },
    {
      "epoch": 0.7549019607843137,
      "grad_norm": 0.21851913630962372,
      "learning_rate": 2.5e-06,
      "loss": 1.2169,
      "step": 154
    },
    {
      "epoch": 0.7598039215686274,
      "grad_norm": 0.2144429087638855,
      "learning_rate": 2.450980392156863e-06,
      "loss": 1.2562,
      "step": 155
    },
    {
      "epoch": 0.7647058823529411,
      "grad_norm": 0.20975197851657867,
      "learning_rate": 2.401960784313726e-06,
      "loss": 1.2004,
      "step": 156
    },
    {
      "epoch": 0.7696078431372549,
      "grad_norm": 0.2141108214855194,
      "learning_rate": 2.3529411764705885e-06,
      "loss": 1.2114,
      "step": 157
    },
    {
      "epoch": 0.7745098039215687,
      "grad_norm": 0.23623649775981903,
      "learning_rate": 2.303921568627451e-06,
      "loss": 1.2306,
      "step": 158
    },
    {
      "epoch": 0.7794117647058824,
      "grad_norm": 0.21351057291030884,
      "learning_rate": 2.254901960784314e-06,
      "loss": 1.2413,
      "step": 159
    },
    {
      "epoch": 0.7843137254901961,
      "grad_norm": 0.20918479561805725,
      "learning_rate": 2.2058823529411767e-06,
      "loss": 1.1989,
      "step": 160
    },
    {
      "epoch": 0.7892156862745098,
      "grad_norm": 0.21953675150871277,
      "learning_rate": 2.1568627450980393e-06,
      "loss": 1.2633,
      "step": 161
    },
    {
      "epoch": 0.7941176470588235,
      "grad_norm": 0.21505387127399445,
      "learning_rate": 2.1078431372549023e-06,
      "loss": 1.1873,
      "step": 162
    },
    {
      "epoch": 0.7990196078431373,
      "grad_norm": 0.21825850009918213,
      "learning_rate": 2.058823529411765e-06,
      "loss": 1.2487,
      "step": 163
    },
    {
      "epoch": 0.803921568627451,
      "grad_norm": 0.23563599586486816,
      "learning_rate": 2.0098039215686276e-06,
      "loss": 1.2063,
      "step": 164
    },
    {
      "epoch": 0.8088235294117647,
      "grad_norm": 0.22734478116035461,
      "learning_rate": 1.96078431372549e-06,
      "loss": 1.2294,
      "step": 165
    },
    {
      "epoch": 0.8137254901960784,
      "grad_norm": 0.2193131297826767,
      "learning_rate": 1.9117647058823528e-06,
      "loss": 1.2399,
      "step": 166
    },
    {
      "epoch": 0.8186274509803921,
      "grad_norm": 0.21577849984169006,
      "learning_rate": 1.8627450980392158e-06,
      "loss": 1.2482,
      "step": 167
    },
    {
      "epoch": 0.8235294117647058,
      "grad_norm": 0.21874377131462097,
      "learning_rate": 1.8137254901960786e-06,
      "loss": 1.2777,
      "step": 168
    },
    {
      "epoch": 0.8284313725490197,
      "grad_norm": 0.22344687581062317,
      "learning_rate": 1.7647058823529414e-06,
      "loss": 1.2095,
      "step": 169
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.20875518023967743,
      "learning_rate": 1.715686274509804e-06,
      "loss": 1.2206,
      "step": 170
    },
    {
      "epoch": 0.8382352941176471,
      "grad_norm": 0.2091529667377472,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 1.2515,
      "step": 171
    },
    {
      "epoch": 0.8431372549019608,
      "grad_norm": 0.20189963281154633,
      "learning_rate": 1.6176470588235297e-06,
      "loss": 1.2233,
      "step": 172
    },
    {
      "epoch": 0.8480392156862745,
      "grad_norm": 0.21625569462776184,
      "learning_rate": 1.5686274509803923e-06,
      "loss": 1.2292,
      "step": 173
    },
    {
      "epoch": 0.8529411764705882,
      "grad_norm": 0.211844801902771,
      "learning_rate": 1.519607843137255e-06,
      "loss": 1.233,
      "step": 174
    },
    {
      "epoch": 0.8578431372549019,
      "grad_norm": 0.21882659196853638,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 1.2252,
      "step": 175
    },
    {
      "epoch": 0.8627450980392157,
      "grad_norm": 0.21150921285152435,
      "learning_rate": 1.4215686274509805e-06,
      "loss": 1.2297,
      "step": 176
    },
    {
      "epoch": 0.8676470588235294,
      "grad_norm": 0.23386207222938538,
      "learning_rate": 1.3725490196078434e-06,
      "loss": 1.1845,
      "step": 177
    },
    {
      "epoch": 0.8725490196078431,
      "grad_norm": 0.2086603194475174,
      "learning_rate": 1.323529411764706e-06,
      "loss": 1.2058,
      "step": 178
    },
    {
      "epoch": 0.8774509803921569,
      "grad_norm": 0.22991524636745453,
      "learning_rate": 1.2745098039215686e-06,
      "loss": 1.2393,
      "step": 179
    },
    {
      "epoch": 0.8823529411764706,
      "grad_norm": 0.2215118408203125,
      "learning_rate": 1.2254901960784314e-06,
      "loss": 1.2583,
      "step": 180
    },
    {
      "epoch": 0.8872549019607843,
      "grad_norm": 0.2247723639011383,
      "learning_rate": 1.1764705882352942e-06,
      "loss": 1.241,
      "step": 181
    },
    {
      "epoch": 0.8921568627450981,
      "grad_norm": 0.21701164543628693,
      "learning_rate": 1.127450980392157e-06,
      "loss": 1.224,
      "step": 182
    },
    {
      "epoch": 0.8970588235294118,
      "grad_norm": 0.22468607127666473,
      "learning_rate": 1.0784313725490197e-06,
      "loss": 1.2591,
      "step": 183
    },
    {
      "epoch": 0.9019607843137255,
      "grad_norm": 0.20946086943149567,
      "learning_rate": 1.0294117647058825e-06,
      "loss": 1.2249,
      "step": 184
    },
    {
      "epoch": 0.9068627450980392,
      "grad_norm": 0.2052374929189682,
      "learning_rate": 9.80392156862745e-07,
      "loss": 1.2094,
      "step": 185
    },
    {
      "epoch": 0.9117647058823529,
      "grad_norm": 0.20981720089912415,
      "learning_rate": 9.313725490196079e-07,
      "loss": 1.2333,
      "step": 186
    },
    {
      "epoch": 0.9166666666666666,
      "grad_norm": 0.21494945883750916,
      "learning_rate": 8.823529411764707e-07,
      "loss": 1.2367,
      "step": 187
    },
    {
      "epoch": 0.9215686274509803,
      "grad_norm": 0.21084636449813843,
      "learning_rate": 8.333333333333333e-07,
      "loss": 1.2388,
      "step": 188
    },
    {
      "epoch": 0.9264705882352942,
      "grad_norm": 0.22243009507656097,
      "learning_rate": 7.843137254901962e-07,
      "loss": 1.1783,
      "step": 189
    },
    {
      "epoch": 0.9313725490196079,
      "grad_norm": 0.20983096957206726,
      "learning_rate": 7.352941176470589e-07,
      "loss": 1.1965,
      "step": 190
    },
    {
      "epoch": 0.9362745098039216,
      "grad_norm": 0.25181856751441956,
      "learning_rate": 6.862745098039217e-07,
      "loss": 1.2736,
      "step": 191
    },
    {
      "epoch": 0.9411764705882353,
      "grad_norm": 0.2098308652639389,
      "learning_rate": 6.372549019607843e-07,
      "loss": 1.2642,
      "step": 192
    },
    {
      "epoch": 0.946078431372549,
      "grad_norm": 0.22072483599185944,
      "learning_rate": 5.882352941176471e-07,
      "loss": 1.2468,
      "step": 193
    },
    {
      "epoch": 0.9509803921568627,
      "grad_norm": 0.2172933965921402,
      "learning_rate": 5.392156862745098e-07,
      "loss": 1.2131,
      "step": 194
    },
    {
      "epoch": 0.9558823529411765,
      "grad_norm": 0.2193666249513626,
      "learning_rate": 4.901960784313725e-07,
      "loss": 1.1724,
      "step": 195
    },
    {
      "epoch": 0.9607843137254902,
      "grad_norm": 0.21400737762451172,
      "learning_rate": 4.4117647058823536e-07,
      "loss": 1.2187,
      "step": 196
    },
    {
      "epoch": 0.9656862745098039,
      "grad_norm": 0.2107897400856018,
      "learning_rate": 3.921568627450981e-07,
      "loss": 1.2092,
      "step": 197
    },
    {
      "epoch": 0.9705882352941176,
      "grad_norm": 0.21394315361976624,
      "learning_rate": 3.4313725490196084e-07,
      "loss": 1.21,
      "step": 198
    },
    {
      "epoch": 0.9754901960784313,
      "grad_norm": 0.21479150652885437,
      "learning_rate": 2.9411764705882356e-07,
      "loss": 1.2111,
      "step": 199
    },
    {
      "epoch": 0.9803921568627451,
      "grad_norm": 0.21593306958675385,
      "learning_rate": 2.4509803921568627e-07,
      "loss": 1.2072,
      "step": 200
    },
    {
      "epoch": 0.9852941176470589,
      "grad_norm": 0.2254142165184021,
      "learning_rate": 1.9607843137254904e-07,
      "loss": 1.1785,
      "step": 201
    },
    {
      "epoch": 0.9901960784313726,
      "grad_norm": 0.21215449273586273,
      "learning_rate": 1.4705882352941178e-07,
      "loss": 1.2154,
      "step": 202
    },
    {
      "epoch": 0.9950980392156863,
      "grad_norm": 0.20781590044498444,
      "learning_rate": 9.803921568627452e-08,
      "loss": 1.2218,
      "step": 203
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.2110944390296936,
      "learning_rate": 4.901960784313726e-08,
      "loss": 1.1856,
      "step": 204
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.2485688924789429,
      "eval_runtime": 2.9503,
      "eval_samples_per_second": 10.508,
      "eval_steps_per_second": 1.356,
      "step": 204
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 204,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 0,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4211142235507917e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
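
A minimal sketch for inspecting this log, assuming the JSON above is saved as trainer_state.json in the working directory; matplotlib is an assumed extra dependency here, not part of the training setup the file records:

# plot_trainer_state.py -- sketch: plot the loss curve from trainer_state.json
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training entries carry a "loss" key; the single end-of-epoch
# evaluation entry carries "eval_loss" instead.
train_logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in train_logs]
losses = [e["loss"] for e in train_logs]

plt.plot(steps, losses, label="train loss")

# Mark the evaluation point recorded at step 204, if present.
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]
if eval_logs:
    plt.scatter([e["step"] for e in eval_logs],
                [e["eval_loss"] for e in eval_logs],
                color="red", zorder=3, label="eval loss")

plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.savefig("loss_curve.png")

For this file the script would show the train loss falling from about 2.08 at step 1 to about 1.19 at step 204, with the eval loss of about 1.25 marked at the final step.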