{
"best_metric": 3884.240234375,
"best_model_checkpoint": "./ckpts/tinyllama_v1.1/ternary-g128/checkpoint-400",
"epoch": 4.0,
"eval_steps": 40,
"global_step": 400,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 0.0,
"loss": 11333.3047,
"step": 1
},
{
"epoch": 0.02,
"learning_rate": 2e-05,
"loss": 12740.3945,
"step": 2
},
{
"epoch": 0.03,
"learning_rate": 2e-05,
"loss": 12751.8154,
"step": 3
},
{
"epoch": 0.04,
"learning_rate": 2e-05,
"loss": 13332.6777,
"step": 4
},
{
"epoch": 0.05,
"learning_rate": 2e-05,
"loss": 12041.1602,
"step": 5
},
{
"epoch": 0.06,
"learning_rate": 2e-05,
"loss": 11759.3701,
"step": 6
},
{
"epoch": 0.07,
"learning_rate": 2e-05,
"loss": 7579.3691,
"step": 7
},
{
"epoch": 0.08,
"learning_rate": 2e-05,
"loss": 7834.7402,
"step": 8
},
{
"epoch": 0.09,
"learning_rate": 2e-05,
"loss": 10647.7695,
"step": 9
},
{
"epoch": 0.1,
"learning_rate": 2e-05,
"loss": 7894.1113,
"step": 10
},
{
"epoch": 0.11,
"learning_rate": 2e-05,
"loss": 9336.8496,
"step": 11
},
{
"epoch": 0.12,
"learning_rate": 2e-05,
"loss": 9224.625,
"step": 12
},
{
"epoch": 0.13,
"learning_rate": 2e-05,
"loss": 8379.665,
"step": 13
},
{
"epoch": 0.14,
"learning_rate": 2e-05,
"loss": 9123.9082,
"step": 14
},
{
"epoch": 0.15,
"learning_rate": 2e-05,
"loss": 10331.1914,
"step": 15
},
{
"epoch": 0.16,
"learning_rate": 2e-05,
"loss": 8695.0479,
"step": 16
},
{
"epoch": 0.17,
"learning_rate": 2e-05,
"loss": 7838.9502,
"step": 17
},
{
"epoch": 0.18,
"learning_rate": 2e-05,
"loss": 7516.7783,
"step": 18
},
{
"epoch": 0.19,
"learning_rate": 2e-05,
"loss": 8220.8701,
"step": 19
},
{
"epoch": 0.2,
"learning_rate": 2e-05,
"loss": 6244.4844,
"step": 20
},
{
"epoch": 0.21,
"learning_rate": 2e-05,
"loss": 7569.1367,
"step": 21
},
{
"epoch": 0.22,
"learning_rate": 2e-05,
"loss": 6951.0103,
"step": 22
},
{
"epoch": 0.23,
"learning_rate": 2e-05,
"loss": 7107.9727,
"step": 23
},
{
"epoch": 0.24,
"learning_rate": 2e-05,
"loss": 7161.4819,
"step": 24
},
{
"epoch": 0.25,
"learning_rate": 2e-05,
"loss": 7868.584,
"step": 25
},
{
"epoch": 0.26,
"learning_rate": 2e-05,
"loss": 6625.4795,
"step": 26
},
{
"epoch": 0.27,
"learning_rate": 2e-05,
"loss": 6222.7422,
"step": 27
},
{
"epoch": 0.28,
"learning_rate": 2e-05,
"loss": 6319.2148,
"step": 28
},
{
"epoch": 0.29,
"learning_rate": 2e-05,
"loss": 5998.3438,
"step": 29
},
{
"epoch": 0.3,
"learning_rate": 2e-05,
"loss": 6406.7197,
"step": 30
},
{
"epoch": 0.31,
"learning_rate": 2e-05,
"loss": 6865.812,
"step": 31
},
{
"epoch": 0.32,
"learning_rate": 2e-05,
"loss": 5550.9609,
"step": 32
},
{
"epoch": 0.33,
"learning_rate": 2e-05,
"loss": 6530.6548,
"step": 33
},
{
"epoch": 0.34,
"learning_rate": 2e-05,
"loss": 5644.4668,
"step": 34
},
{
"epoch": 0.35,
"learning_rate": 2e-05,
"loss": 6787.8086,
"step": 35
},
{
"epoch": 0.36,
"learning_rate": 2e-05,
"loss": 6230.1162,
"step": 36
},
{
"epoch": 0.37,
"learning_rate": 2e-05,
"loss": 5976.9404,
"step": 37
},
{
"epoch": 0.38,
"learning_rate": 2e-05,
"loss": 5261.2661,
"step": 38
},
{
"epoch": 0.39,
"learning_rate": 2e-05,
"loss": 5871.9727,
"step": 39
},
{
"epoch": 0.4,
"learning_rate": 2e-05,
"loss": 5244.6387,
"step": 40
},
{
"epoch": 0.4,
"eval_loss": 5789.15625,
"eval_runtime": 153.2221,
"eval_samples_per_second": 10.416,
"eval_steps_per_second": 0.653,
"step": 40
},
{
"epoch": 0.41,
"learning_rate": 2e-05,
"loss": 5647.2305,
"step": 41
},
{
"epoch": 0.42,
"learning_rate": 2e-05,
"loss": 5769.7246,
"step": 42
},
{
"epoch": 0.43,
"learning_rate": 2e-05,
"loss": 5806.084,
"step": 43
},
{
"epoch": 0.44,
"learning_rate": 2e-05,
"loss": 6338.6992,
"step": 44
},
{
"epoch": 0.45,
"learning_rate": 2e-05,
"loss": 6663.2988,
"step": 45
},
{
"epoch": 0.46,
"learning_rate": 2e-05,
"loss": 5164.0898,
"step": 46
},
{
"epoch": 0.47,
"learning_rate": 2e-05,
"loss": 5250.1279,
"step": 47
},
{
"epoch": 0.48,
"learning_rate": 2e-05,
"loss": 4920.438,
"step": 48
},
{
"epoch": 0.49,
"learning_rate": 2e-05,
"loss": 5083.2578,
"step": 49
},
{
"epoch": 0.5,
"learning_rate": 2e-05,
"loss": 5441.7505,
"step": 50
},
{
"epoch": 0.51,
"learning_rate": 2e-05,
"loss": 5271.9443,
"step": 51
},
{
"epoch": 0.52,
"learning_rate": 2e-05,
"loss": 6324.0801,
"step": 52
},
{
"epoch": 0.53,
"learning_rate": 2e-05,
"loss": 5859.5479,
"step": 53
},
{
"epoch": 0.54,
"learning_rate": 2e-05,
"loss": 5835.2354,
"step": 54
},
{
"epoch": 0.55,
"learning_rate": 2e-05,
"loss": 5948.1357,
"step": 55
},
{
"epoch": 0.56,
"learning_rate": 2e-05,
"loss": 4974.9961,
"step": 56
},
{
"epoch": 0.57,
"learning_rate": 2e-05,
"loss": 5055.021,
"step": 57
},
{
"epoch": 0.58,
"learning_rate": 2e-05,
"loss": 5852.9814,
"step": 58
},
{
"epoch": 0.59,
"learning_rate": 2e-05,
"loss": 4996.5635,
"step": 59
},
{
"epoch": 0.6,
"learning_rate": 2e-05,
"loss": 4621.3154,
"step": 60
},
{
"epoch": 0.61,
"learning_rate": 2e-05,
"loss": 4878.3037,
"step": 61
},
{
"epoch": 0.62,
"learning_rate": 2e-05,
"loss": 4970.6401,
"step": 62
},
{
"epoch": 0.63,
"learning_rate": 2e-05,
"loss": 4931.6152,
"step": 63
},
{
"epoch": 0.64,
"learning_rate": 2e-05,
"loss": 5476.6318,
"step": 64
},
{
"epoch": 0.65,
"learning_rate": 2e-05,
"loss": 5228.8818,
"step": 65
},
{
"epoch": 0.66,
"learning_rate": 2e-05,
"loss": 4760.2158,
"step": 66
},
{
"epoch": 0.67,
"learning_rate": 2e-05,
"loss": 4873.3926,
"step": 67
},
{
"epoch": 0.68,
"learning_rate": 2e-05,
"loss": 5457.2378,
"step": 68
},
{
"epoch": 0.69,
"learning_rate": 2e-05,
"loss": 5250.7954,
"step": 69
},
{
"epoch": 0.7,
"learning_rate": 2e-05,
"loss": 4680.854,
"step": 70
},
{
"epoch": 0.71,
"learning_rate": 2e-05,
"loss": 4848.3828,
"step": 71
},
{
"epoch": 0.72,
"learning_rate": 2e-05,
"loss": 5283.5181,
"step": 72
},
{
"epoch": 0.73,
"learning_rate": 2e-05,
"loss": 4919.6475,
"step": 73
},
{
"epoch": 0.74,
"learning_rate": 2e-05,
"loss": 5075.8682,
"step": 74
},
{
"epoch": 0.75,
"learning_rate": 2e-05,
"loss": 4698.2148,
"step": 75
},
{
"epoch": 0.76,
"learning_rate": 2e-05,
"loss": 4929.0264,
"step": 76
},
{
"epoch": 0.77,
"learning_rate": 2e-05,
"loss": 5158.9775,
"step": 77
},
{
"epoch": 0.78,
"learning_rate": 2e-05,
"loss": 5029.708,
"step": 78
},
{
"epoch": 0.79,
"learning_rate": 2e-05,
"loss": 5271.4033,
"step": 79
},
{
"epoch": 0.8,
"learning_rate": 2e-05,
"loss": 4820.6875,
"step": 80
},
{
"epoch": 0.8,
"eval_loss": 4961.72412109375,
"eval_runtime": 153.3149,
"eval_samples_per_second": 10.41,
"eval_steps_per_second": 0.652,
"step": 80
},
{
"epoch": 0.81,
"learning_rate": 2e-05,
"loss": 4819.8237,
"step": 81
},
{
"epoch": 0.82,
"learning_rate": 2e-05,
"loss": 4838.397,
"step": 82
},
{
"epoch": 0.83,
"learning_rate": 2e-05,
"loss": 5228.1221,
"step": 83
},
{
"epoch": 0.84,
"learning_rate": 2e-05,
"loss": 5043.3447,
"step": 84
},
{
"epoch": 0.85,
"learning_rate": 2e-05,
"loss": 5215.2246,
"step": 85
},
{
"epoch": 0.86,
"learning_rate": 2e-05,
"loss": 4427.6909,
"step": 86
},
{
"epoch": 0.87,
"learning_rate": 2e-05,
"loss": 5120.7344,
"step": 87
},
{
"epoch": 0.88,
"learning_rate": 2e-05,
"loss": 5044.043,
"step": 88
},
{
"epoch": 0.89,
"learning_rate": 2e-05,
"loss": 4083.1196,
"step": 89
},
{
"epoch": 0.9,
"learning_rate": 2e-05,
"loss": 4404.4829,
"step": 90
},
{
"epoch": 0.91,
"learning_rate": 2e-05,
"loss": 5224.4292,
"step": 91
},
{
"epoch": 0.92,
"learning_rate": 2e-05,
"loss": 5386.4004,
"step": 92
},
{
"epoch": 0.93,
"learning_rate": 2e-05,
"loss": 5193.6807,
"step": 93
},
{
"epoch": 0.94,
"learning_rate": 2e-05,
"loss": 5194.8838,
"step": 94
},
{
"epoch": 0.95,
"learning_rate": 2e-05,
"loss": 5112.0229,
"step": 95
},
{
"epoch": 0.96,
"learning_rate": 2e-05,
"loss": 4970.9795,
"step": 96
},
{
"epoch": 0.97,
"learning_rate": 2e-05,
"loss": 4510.7227,
"step": 97
},
{
"epoch": 0.98,
"learning_rate": 2e-05,
"loss": 5206.0742,
"step": 98
},
{
"epoch": 0.99,
"learning_rate": 2e-05,
"loss": 5560.2061,
"step": 99
},
{
"epoch": 1.0,
"learning_rate": 2e-05,
"loss": 4309.6255,
"step": 100
},
{
"epoch": 1.01,
"learning_rate": 2e-05,
"loss": 4270.1172,
"step": 101
},
{
"epoch": 1.02,
"learning_rate": 2e-05,
"loss": 4307.147,
"step": 102
},
{
"epoch": 1.03,
"learning_rate": 2e-05,
"loss": 5392.7363,
"step": 103
},
{
"epoch": 1.04,
"learning_rate": 2e-05,
"loss": 4613.896,
"step": 104
},
{
"epoch": 1.05,
"learning_rate": 2e-05,
"loss": 4642.5605,
"step": 105
},
{
"epoch": 1.06,
"learning_rate": 2e-05,
"loss": 4990.2676,
"step": 106
},
{
"epoch": 1.07,
"learning_rate": 2e-05,
"loss": 4562.7651,
"step": 107
},
{
"epoch": 1.08,
"learning_rate": 2e-05,
"loss": 4420.7632,
"step": 108
},
{
"epoch": 1.09,
"learning_rate": 2e-05,
"loss": 4600.2861,
"step": 109
},
{
"epoch": 1.1,
"learning_rate": 2e-05,
"loss": 4772.0371,
"step": 110
},
{
"epoch": 1.11,
"learning_rate": 2e-05,
"loss": 5246.8555,
"step": 111
},
{
"epoch": 1.12,
"learning_rate": 2e-05,
"loss": 4700.6631,
"step": 112
},
{
"epoch": 1.13,
"learning_rate": 2e-05,
"loss": 4570.7441,
"step": 113
},
{
"epoch": 1.14,
"learning_rate": 2e-05,
"loss": 4589.0493,
"step": 114
},
{
"epoch": 1.15,
"learning_rate": 2e-05,
"loss": 4846.958,
"step": 115
},
{
"epoch": 1.16,
"learning_rate": 2e-05,
"loss": 4793.291,
"step": 116
},
{
"epoch": 1.17,
"learning_rate": 2e-05,
"loss": 5359.541,
"step": 117
},
{
"epoch": 1.18,
"learning_rate": 2e-05,
"loss": 4714.4346,
"step": 118
},
{
"epoch": 1.19,
"learning_rate": 2e-05,
"loss": 4767.8203,
"step": 119
},
{
"epoch": 1.2,
"learning_rate": 2e-05,
"loss": 5251.5449,
"step": 120
},
{
"epoch": 1.2,
"eval_loss": 4679.81640625,
"eval_runtime": 153.1502,
"eval_samples_per_second": 10.421,
"eval_steps_per_second": 0.653,
"step": 120
},
{
"epoch": 1.21,
"learning_rate": 2e-05,
"loss": 4597.8896,
"step": 121
},
{
"epoch": 1.22,
"learning_rate": 2e-05,
"loss": 4525.1045,
"step": 122
},
{
"epoch": 1.23,
"learning_rate": 2e-05,
"loss": 4740.7261,
"step": 123
},
{
"epoch": 1.24,
"learning_rate": 2e-05,
"loss": 4635.7217,
"step": 124
},
{
"epoch": 1.25,
"learning_rate": 2e-05,
"loss": 4253.7402,
"step": 125
},
{
"epoch": 1.26,
"learning_rate": 2e-05,
"loss": 4565.4546,
"step": 126
},
{
"epoch": 1.27,
"learning_rate": 2e-05,
"loss": 4249.1875,
"step": 127
},
{
"epoch": 1.28,
"learning_rate": 2e-05,
"loss": 5051.9473,
"step": 128
},
{
"epoch": 1.29,
"learning_rate": 2e-05,
"loss": 4128.4834,
"step": 129
},
{
"epoch": 1.3,
"learning_rate": 2e-05,
"loss": 4775.8789,
"step": 130
},
{
"epoch": 1.31,
"learning_rate": 2e-05,
"loss": 4661.0552,
"step": 131
},
{
"epoch": 1.32,
"learning_rate": 2e-05,
"loss": 4616.229,
"step": 132
},
{
"epoch": 1.33,
"learning_rate": 2e-05,
"loss": 5068.2056,
"step": 133
},
{
"epoch": 1.34,
"learning_rate": 2e-05,
"loss": 5291.3818,
"step": 134
},
{
"epoch": 1.35,
"learning_rate": 2e-05,
"loss": 4666.6348,
"step": 135
},
{
"epoch": 1.36,
"learning_rate": 2e-05,
"loss": 5022.856,
"step": 136
},
{
"epoch": 1.37,
"learning_rate": 2e-05,
"loss": 4555.9136,
"step": 137
},
{
"epoch": 1.38,
"learning_rate": 2e-05,
"loss": 4543.4004,
"step": 138
},
{
"epoch": 1.39,
"learning_rate": 2e-05,
"loss": 5208.0181,
"step": 139
},
{
"epoch": 1.4,
"learning_rate": 2e-05,
"loss": 4202.6709,
"step": 140
},
{
"epoch": 1.41,
"learning_rate": 2e-05,
"loss": 4570.6206,
"step": 141
},
{
"epoch": 1.42,
"learning_rate": 2e-05,
"loss": 5114.9287,
"step": 142
},
{
"epoch": 1.43,
"learning_rate": 2e-05,
"loss": 4141.6299,
"step": 143
},
{
"epoch": 1.44,
"learning_rate": 2e-05,
"loss": 4849.125,
"step": 144
},
{
"epoch": 1.45,
"learning_rate": 2e-05,
"loss": 4281.4214,
"step": 145
},
{
"epoch": 1.46,
"learning_rate": 2e-05,
"loss": 4833.1548,
"step": 146
},
{
"epoch": 1.47,
"learning_rate": 2e-05,
"loss": 4661.8291,
"step": 147
},
{
"epoch": 1.48,
"learning_rate": 2e-05,
"loss": 4485.2344,
"step": 148
},
{
"epoch": 1.49,
"learning_rate": 2e-05,
"loss": 4128.439,
"step": 149
},
{
"epoch": 1.5,
"learning_rate": 2e-05,
"loss": 4300.2852,
"step": 150
},
{
"epoch": 1.51,
"learning_rate": 2e-05,
"loss": 4689.8813,
"step": 151
},
{
"epoch": 1.52,
"learning_rate": 2e-05,
"loss": 4261.6787,
"step": 152
},
{
"epoch": 1.53,
"learning_rate": 2e-05,
"loss": 4410.7871,
"step": 153
},
{
"epoch": 1.54,
"learning_rate": 2e-05,
"loss": 4859.6416,
"step": 154
},
{
"epoch": 1.55,
"learning_rate": 2e-05,
"loss": 4945.5195,
"step": 155
},
{
"epoch": 1.56,
"learning_rate": 2e-05,
"loss": 4227.752,
"step": 156
},
{
"epoch": 1.57,
"learning_rate": 2e-05,
"loss": 4758.1689,
"step": 157
},
{
"epoch": 1.58,
"learning_rate": 2e-05,
"loss": 4772.7354,
"step": 158
},
{
"epoch": 1.59,
"learning_rate": 2e-05,
"loss": 4114.6689,
"step": 159
},
{
"epoch": 1.6,
"learning_rate": 2e-05,
"loss": 4792.8496,
"step": 160
},
{
"epoch": 1.6,
"eval_loss": 4522.2705078125,
"eval_runtime": 153.0119,
"eval_samples_per_second": 10.431,
"eval_steps_per_second": 0.654,
"step": 160
},
{
"epoch": 1.61,
"learning_rate": 2e-05,
"loss": 4813.7637,
"step": 161
},
{
"epoch": 1.62,
"learning_rate": 2e-05,
"loss": 3838.3706,
"step": 162
},
{
"epoch": 1.63,
"learning_rate": 2e-05,
"loss": 4562.4961,
"step": 163
},
{
"epoch": 1.64,
"learning_rate": 2e-05,
"loss": 4465.833,
"step": 164
},
{
"epoch": 1.65,
"learning_rate": 2e-05,
"loss": 4186.3252,
"step": 165
},
{
"epoch": 1.66,
"learning_rate": 2e-05,
"loss": 4580.1899,
"step": 166
},
{
"epoch": 1.67,
"learning_rate": 2e-05,
"loss": 4878.4688,
"step": 167
},
{
"epoch": 1.68,
"learning_rate": 2e-05,
"loss": 4298.249,
"step": 168
},
{
"epoch": 1.69,
"learning_rate": 2e-05,
"loss": 4254.103,
"step": 169
},
{
"epoch": 1.7,
"learning_rate": 2e-05,
"loss": 3883.5581,
"step": 170
},
{
"epoch": 1.71,
"learning_rate": 2e-05,
"loss": 4455.5347,
"step": 171
},
{
"epoch": 1.72,
"learning_rate": 2e-05,
"loss": 4322.6348,
"step": 172
},
{
"epoch": 1.73,
"learning_rate": 2e-05,
"loss": 4249.686,
"step": 173
},
{
"epoch": 1.74,
"learning_rate": 2e-05,
"loss": 4670.4375,
"step": 174
},
{
"epoch": 1.75,
"learning_rate": 2e-05,
"loss": 3984.6772,
"step": 175
},
{
"epoch": 1.76,
"learning_rate": 2e-05,
"loss": 4424.6416,
"step": 176
},
{
"epoch": 1.77,
"learning_rate": 2e-05,
"loss": 4487.6416,
"step": 177
},
{
"epoch": 1.78,
"learning_rate": 2e-05,
"loss": 4786.0723,
"step": 178
},
{
"epoch": 1.79,
"learning_rate": 2e-05,
"loss": 4320.7349,
"step": 179
},
{
"epoch": 1.8,
"learning_rate": 2e-05,
"loss": 4392.415,
"step": 180
},
{
"epoch": 1.81,
"learning_rate": 2e-05,
"loss": 4128.5469,
"step": 181
},
{
"epoch": 1.82,
"learning_rate": 2e-05,
"loss": 3805.3416,
"step": 182
},
{
"epoch": 1.83,
"learning_rate": 2e-05,
"loss": 5295.7549,
"step": 183
},
{
"epoch": 1.84,
"learning_rate": 2e-05,
"loss": 4073.2388,
"step": 184
},
{
"epoch": 1.85,
"learning_rate": 2e-05,
"loss": 4689.1528,
"step": 185
},
{
"epoch": 1.86,
"learning_rate": 2e-05,
"loss": 4331.627,
"step": 186
},
{
"epoch": 1.87,
"learning_rate": 2e-05,
"loss": 4956.3774,
"step": 187
},
{
"epoch": 1.88,
"learning_rate": 2e-05,
"loss": 3998.3621,
"step": 188
},
{
"epoch": 1.89,
"learning_rate": 2e-05,
"loss": 4095.9175,
"step": 189
},
{
"epoch": 1.9,
"learning_rate": 2e-05,
"loss": 3817.9526,
"step": 190
},
{
"epoch": 1.91,
"learning_rate": 2e-05,
"loss": 4632.4111,
"step": 191
},
{
"epoch": 1.92,
"learning_rate": 2e-05,
"loss": 3853.741,
"step": 192
},
{
"epoch": 1.93,
"learning_rate": 2e-05,
"loss": 3862.7646,
"step": 193
},
{
"epoch": 1.94,
"learning_rate": 2e-05,
"loss": 4408.2993,
"step": 194
},
{
"epoch": 1.95,
"learning_rate": 2e-05,
"loss": 3478.6504,
"step": 195
},
{
"epoch": 1.96,
"learning_rate": 2e-05,
"loss": 4667.4932,
"step": 196
},
{
"epoch": 1.97,
"learning_rate": 2e-05,
"loss": 3785.4988,
"step": 197
},
{
"epoch": 1.98,
"learning_rate": 2e-05,
"loss": 4366.5952,
"step": 198
},
{
"epoch": 1.99,
"learning_rate": 2e-05,
"loss": 4249.0308,
"step": 199
},
{
"epoch": 2.0,
"learning_rate": 2e-05,
"loss": 4039.4888,
"step": 200
},
{
"epoch": 2.0,
"eval_loss": 4252.13720703125,
"eval_runtime": 152.5389,
"eval_samples_per_second": 10.463,
"eval_steps_per_second": 0.656,
"step": 200
},
{
"epoch": 2.01,
"learning_rate": 2e-05,
"loss": 4239.4941,
"step": 201
},
{
"epoch": 2.02,
"learning_rate": 2e-05,
"loss": 3973.5347,
"step": 202
},
{
"epoch": 2.03,
"learning_rate": 2e-05,
"loss": 4343.3428,
"step": 203
},
{
"epoch": 2.04,
"learning_rate": 2e-05,
"loss": 3764.2117,
"step": 204
},
{
"epoch": 2.05,
"learning_rate": 2e-05,
"loss": 4449.8027,
"step": 205
},
{
"epoch": 2.06,
"learning_rate": 2e-05,
"loss": 4788.9707,
"step": 206
},
{
"epoch": 2.07,
"learning_rate": 2e-05,
"loss": 4126.4805,
"step": 207
},
{
"epoch": 2.08,
"learning_rate": 2e-05,
"loss": 4510.4546,
"step": 208
},
{
"epoch": 2.09,
"learning_rate": 2e-05,
"loss": 3748.04,
"step": 209
},
{
"epoch": 2.1,
"learning_rate": 2e-05,
"loss": 4639.4795,
"step": 210
},
{
"epoch": 2.11,
"learning_rate": 2e-05,
"loss": 4275.8872,
"step": 211
},
{
"epoch": 2.12,
"learning_rate": 2e-05,
"loss": 4072.8267,
"step": 212
},
{
"epoch": 2.13,
"learning_rate": 2e-05,
"loss": 4090.3594,
"step": 213
},
{
"epoch": 2.14,
"learning_rate": 2e-05,
"loss": 4549.5156,
"step": 214
},
{
"epoch": 2.15,
"learning_rate": 2e-05,
"loss": 4239.3809,
"step": 215
},
{
"epoch": 2.16,
"learning_rate": 2e-05,
"loss": 4693.9189,
"step": 216
},
{
"epoch": 2.17,
"learning_rate": 2e-05,
"loss": 3877.6008,
"step": 217
},
{
"epoch": 2.18,
"learning_rate": 2e-05,
"loss": 4515.2139,
"step": 218
},
{
"epoch": 2.19,
"learning_rate": 2e-05,
"loss": 4458.5381,
"step": 219
},
{
"epoch": 2.2,
"learning_rate": 2e-05,
"loss": 4279.3545,
"step": 220
},
{
"epoch": 2.21,
"learning_rate": 2e-05,
"loss": 4421.5928,
"step": 221
},
{
"epoch": 2.22,
"learning_rate": 2e-05,
"loss": 4403.8462,
"step": 222
},
{
"epoch": 2.23,
"learning_rate": 2e-05,
"loss": 3923.6372,
"step": 223
},
{
"epoch": 2.24,
"learning_rate": 2e-05,
"loss": 3892.7231,
"step": 224
},
{
"epoch": 2.25,
"learning_rate": 2e-05,
"loss": 4327.0947,
"step": 225
},
{
"epoch": 2.26,
"learning_rate": 2e-05,
"loss": 4771.2183,
"step": 226
},
{
"epoch": 2.27,
"learning_rate": 2e-05,
"loss": 4328.8911,
"step": 227
},
{
"epoch": 2.28,
"learning_rate": 2e-05,
"loss": 4228.5918,
"step": 228
},
{
"epoch": 2.29,
"learning_rate": 2e-05,
"loss": 4549.1064,
"step": 229
},
{
"epoch": 2.3,
"learning_rate": 2e-05,
"loss": 4156.6675,
"step": 230
},
{
"epoch": 2.31,
"learning_rate": 2e-05,
"loss": 4196.0254,
"step": 231
},
{
"epoch": 2.32,
"learning_rate": 2e-05,
"loss": 4296.939,
"step": 232
},
{
"epoch": 2.33,
"learning_rate": 2e-05,
"loss": 4569.1528,
"step": 233
},
{
"epoch": 2.34,
"learning_rate": 2e-05,
"loss": 3441.3088,
"step": 234
},
{
"epoch": 2.35,
"learning_rate": 2e-05,
"loss": 3583.4678,
"step": 235
},
{
"epoch": 2.36,
"learning_rate": 2e-05,
"loss": 3813.4893,
"step": 236
},
{
"epoch": 2.37,
"learning_rate": 2e-05,
"loss": 4058.2871,
"step": 237
},
{
"epoch": 2.38,
"learning_rate": 2e-05,
"loss": 3981.6729,
"step": 238
},
{
"epoch": 2.39,
"learning_rate": 2e-05,
"loss": 3813.9546,
"step": 239
},
{
"epoch": 2.4,
"learning_rate": 2e-05,
"loss": 4029.8252,
"step": 240
},
{
"epoch": 2.4,
"eval_loss": 4160.52294921875,
"eval_runtime": 152.8787,
"eval_samples_per_second": 10.44,
"eval_steps_per_second": 0.654,
"step": 240
},
{
"epoch": 2.41,
"learning_rate": 2e-05,
"loss": 4221.4492,
"step": 241
},
{
"epoch": 2.42,
"learning_rate": 2e-05,
"loss": 4045.0322,
"step": 242
},
{
"epoch": 2.43,
"learning_rate": 2e-05,
"loss": 4226.5371,
"step": 243
},
{
"epoch": 2.44,
"learning_rate": 2e-05,
"loss": 3511.958,
"step": 244
},
{
"epoch": 2.45,
"learning_rate": 2e-05,
"loss": 4395.8027,
"step": 245
},
{
"epoch": 2.46,
"learning_rate": 2e-05,
"loss": 4735.3145,
"step": 246
},
{
"epoch": 2.47,
"learning_rate": 2e-05,
"loss": 3843.616,
"step": 247
},
{
"epoch": 2.48,
"learning_rate": 2e-05,
"loss": 4264.6924,
"step": 248
},
{
"epoch": 2.49,
"learning_rate": 2e-05,
"loss": 4039.4785,
"step": 249
},
{
"epoch": 2.5,
"learning_rate": 2e-05,
"loss": 4019.9854,
"step": 250
},
{
"epoch": 2.51,
"learning_rate": 2e-05,
"loss": 4662.4287,
"step": 251
},
{
"epoch": 2.52,
"learning_rate": 2e-05,
"loss": 4328.1245,
"step": 252
},
{
"epoch": 2.53,
"learning_rate": 2e-05,
"loss": 3456.5835,
"step": 253
},
{
"epoch": 2.54,
"learning_rate": 2e-05,
"loss": 4076.3733,
"step": 254
},
{
"epoch": 2.55,
"learning_rate": 2e-05,
"loss": 3871.9949,
"step": 255
},
{
"epoch": 2.56,
"learning_rate": 2e-05,
"loss": 3940.6655,
"step": 256
},
{
"epoch": 2.57,
"learning_rate": 2e-05,
"loss": 3684.9651,
"step": 257
},
{
"epoch": 2.58,
"learning_rate": 2e-05,
"loss": 3649.6597,
"step": 258
},
{
"epoch": 2.59,
"learning_rate": 2e-05,
"loss": 4184.002,
"step": 259
},
{
"epoch": 2.6,
"learning_rate": 2e-05,
"loss": 3991.2231,
"step": 260
},
{
"epoch": 2.61,
"learning_rate": 2e-05,
"loss": 3957.1196,
"step": 261
},
{
"epoch": 2.62,
"learning_rate": 2e-05,
"loss": 4392.5942,
"step": 262
},
{
"epoch": 2.63,
"learning_rate": 2e-05,
"loss": 3652.1855,
"step": 263
},
{
"epoch": 2.64,
"learning_rate": 2e-05,
"loss": 4256.6865,
"step": 264
},
{
"epoch": 2.65,
"learning_rate": 2e-05,
"loss": 3684.3928,
"step": 265
},
{
"epoch": 2.66,
"learning_rate": 2e-05,
"loss": 3780.9175,
"step": 266
},
{
"epoch": 2.67,
"learning_rate": 2e-05,
"loss": 4062.6343,
"step": 267
},
{
"epoch": 2.68,
"learning_rate": 2e-05,
"loss": 4281.2744,
"step": 268
},
{
"epoch": 2.69,
"learning_rate": 2e-05,
"loss": 4767.3281,
"step": 269
},
{
"epoch": 2.7,
"learning_rate": 2e-05,
"loss": 4602.7568,
"step": 270
},
{
"epoch": 2.71,
"learning_rate": 2e-05,
"loss": 4051.676,
"step": 271
},
{
"epoch": 2.72,
"learning_rate": 2e-05,
"loss": 4140.875,
"step": 272
},
{
"epoch": 2.73,
"learning_rate": 2e-05,
"loss": 3707.1763,
"step": 273
},
{
"epoch": 2.74,
"learning_rate": 2e-05,
"loss": 3892.2764,
"step": 274
},
{
"epoch": 2.75,
"learning_rate": 2e-05,
"loss": 3792.2725,
"step": 275
},
{
"epoch": 2.76,
"learning_rate": 2e-05,
"loss": 4242.418,
"step": 276
},
{
"epoch": 2.77,
"learning_rate": 2e-05,
"loss": 4035.6191,
"step": 277
},
{
"epoch": 2.78,
"learning_rate": 2e-05,
"loss": 4154.3467,
"step": 278
},
{
"epoch": 2.79,
"learning_rate": 2e-05,
"loss": 3960.1741,
"step": 279
},
{
"epoch": 2.8,
"learning_rate": 2e-05,
"loss": 3626.5103,
"step": 280
},
{
"epoch": 2.8,
"eval_loss": 4051.461669921875,
"eval_runtime": 152.381,
"eval_samples_per_second": 10.474,
"eval_steps_per_second": 0.656,
"step": 280
},
{
"epoch": 2.81,
"learning_rate": 2e-05,
"loss": 4040.4492,
"step": 281
},
{
"epoch": 2.82,
"learning_rate": 2e-05,
"loss": 3889.311,
"step": 282
},
{
"epoch": 2.83,
"learning_rate": 2e-05,
"loss": 3271.3108,
"step": 283
},
{
"epoch": 2.84,
"learning_rate": 2e-05,
"loss": 4014.6853,
"step": 284
},
{
"epoch": 2.85,
"learning_rate": 2e-05,
"loss": 4074.5417,
"step": 285
},
{
"epoch": 2.86,
"learning_rate": 2e-05,
"loss": 4337.8213,
"step": 286
},
{
"epoch": 2.87,
"learning_rate": 2e-05,
"loss": 3792.6384,
"step": 287
},
{
"epoch": 2.88,
"learning_rate": 2e-05,
"loss": 3692.104,
"step": 288
},
{
"epoch": 2.89,
"learning_rate": 2e-05,
"loss": 4260.9463,
"step": 289
},
{
"epoch": 2.9,
"learning_rate": 2e-05,
"loss": 3741.4792,
"step": 290
},
{
"epoch": 2.91,
"learning_rate": 2e-05,
"loss": 3809.4341,
"step": 291
},
{
"epoch": 2.92,
"learning_rate": 2e-05,
"loss": 4053.488,
"step": 292
},
{
"epoch": 2.93,
"learning_rate": 2e-05,
"loss": 3743.4941,
"step": 293
},
{
"epoch": 2.94,
"learning_rate": 2e-05,
"loss": 4357.4707,
"step": 294
},
{
"epoch": 2.95,
"learning_rate": 2e-05,
"loss": 4021.8262,
"step": 295
},
{
"epoch": 2.96,
"learning_rate": 2e-05,
"loss": 3761.1348,
"step": 296
},
{
"epoch": 2.97,
"learning_rate": 2e-05,
"loss": 4424.8853,
"step": 297
},
{
"epoch": 2.98,
"learning_rate": 2e-05,
"loss": 3830.8997,
"step": 298
},
{
"epoch": 2.99,
"learning_rate": 2e-05,
"loss": 4062.6968,
"step": 299
},
{
"epoch": 3.0,
"learning_rate": 2e-05,
"loss": 4436.0918,
"step": 300
},
{
"epoch": 3.01,
"learning_rate": 2e-05,
"loss": 4233.5381,
"step": 301
},
{
"epoch": 3.02,
"learning_rate": 2e-05,
"loss": 4343.8237,
"step": 302
},
{
"epoch": 3.03,
"learning_rate": 2e-05,
"loss": 4052.5039,
"step": 303
},
{
"epoch": 3.04,
"learning_rate": 2e-05,
"loss": 4048.3113,
"step": 304
},
{
"epoch": 3.05,
"learning_rate": 2e-05,
"loss": 3583.0908,
"step": 305
},
{
"epoch": 3.06,
"learning_rate": 2e-05,
"loss": 3624.552,
"step": 306
},
{
"epoch": 3.07,
"learning_rate": 2e-05,
"loss": 4182.4458,
"step": 307
},
{
"epoch": 3.08,
"learning_rate": 2e-05,
"loss": 3741.467,
"step": 308
},
{
"epoch": 3.09,
"learning_rate": 2e-05,
"loss": 4421.3584,
"step": 309
},
{
"epoch": 3.1,
"learning_rate": 2e-05,
"loss": 3731.9844,
"step": 310
},
{
"epoch": 3.11,
"learning_rate": 2e-05,
"loss": 3778.5786,
"step": 311
},
{
"epoch": 3.12,
"learning_rate": 2e-05,
"loss": 3995.1738,
"step": 312
},
{
"epoch": 3.13,
"learning_rate": 2e-05,
"loss": 3414.417,
"step": 313
},
{
"epoch": 3.14,
"learning_rate": 2e-05,
"loss": 3799.9009,
"step": 314
},
{
"epoch": 3.15,
"learning_rate": 2e-05,
"loss": 3363.4624,
"step": 315
},
{
"epoch": 3.16,
"learning_rate": 2e-05,
"loss": 3599.3823,
"step": 316
},
{
"epoch": 3.17,
"learning_rate": 2e-05,
"loss": 4243.3867,
"step": 317
},
{
"epoch": 3.18,
"learning_rate": 2e-05,
"loss": 4223.4482,
"step": 318
},
{
"epoch": 3.19,
"learning_rate": 2e-05,
"loss": 3808.5315,
"step": 319
},
{
"epoch": 3.2,
"learning_rate": 2e-05,
"loss": 4241.2925,
"step": 320
},
{
"epoch": 3.2,
"eval_loss": 3989.93701171875,
"eval_runtime": 152.2822,
"eval_samples_per_second": 10.481,
"eval_steps_per_second": 0.657,
"step": 320
},
{
"epoch": 3.21,
"learning_rate": 2e-05,
"loss": 4326.0718,
"step": 321
},
{
"epoch": 3.22,
"learning_rate": 2e-05,
"loss": 4092.2671,
"step": 322
},
{
"epoch": 3.23,
"learning_rate": 2e-05,
"loss": 3987.1335,
"step": 323
},
{
"epoch": 3.24,
"learning_rate": 2e-05,
"loss": 3692.0537,
"step": 324
},
{
"epoch": 3.25,
"learning_rate": 2e-05,
"loss": 4115.0986,
"step": 325
},
{
"epoch": 3.26,
"learning_rate": 2e-05,
"loss": 3721.7102,
"step": 326
},
{
"epoch": 3.27,
"learning_rate": 2e-05,
"loss": 4032.0439,
"step": 327
},
{
"epoch": 3.28,
"learning_rate": 2e-05,
"loss": 3985.3152,
"step": 328
},
{
"epoch": 3.29,
"learning_rate": 2e-05,
"loss": 4192.1748,
"step": 329
},
{
"epoch": 3.3,
"learning_rate": 2e-05,
"loss": 4013.9749,
"step": 330
},
{
"epoch": 3.31,
"learning_rate": 2e-05,
"loss": 3703.3384,
"step": 331
},
{
"epoch": 3.32,
"learning_rate": 2e-05,
"loss": 3750.4739,
"step": 332
},
{
"epoch": 3.33,
"learning_rate": 2e-05,
"loss": 3136.6565,
"step": 333
},
{
"epoch": 3.34,
"learning_rate": 2e-05,
"loss": 3656.863,
"step": 334
},
{
"epoch": 3.35,
"learning_rate": 2e-05,
"loss": 3817.365,
"step": 335
},
{
"epoch": 3.36,
"learning_rate": 2e-05,
"loss": 3881.1313,
"step": 336
},
{
"epoch": 3.37,
"learning_rate": 2e-05,
"loss": 4028.1912,
"step": 337
},
{
"epoch": 3.38,
"learning_rate": 2e-05,
"loss": 3855.8794,
"step": 338
},
{
"epoch": 3.39,
"learning_rate": 2e-05,
"loss": 4090.366,
"step": 339
},
{
"epoch": 3.4,
"learning_rate": 2e-05,
"loss": 4035.5522,
"step": 340
},
{
"epoch": 3.41,
"learning_rate": 2e-05,
"loss": 3345.8081,
"step": 341
},
{
"epoch": 3.42,
"learning_rate": 2e-05,
"loss": 4388.6914,
"step": 342
},
{
"epoch": 3.43,
"learning_rate": 2e-05,
"loss": 3672.5535,
"step": 343
},
{
"epoch": 3.44,
"learning_rate": 2e-05,
"loss": 4601.6753,
"step": 344
},
{
"epoch": 3.45,
"learning_rate": 2e-05,
"loss": 3514.8469,
"step": 345
},
{
"epoch": 3.46,
"learning_rate": 2e-05,
"loss": 3831.9482,
"step": 346
},
{
"epoch": 3.47,
"learning_rate": 2e-05,
"loss": 3722.3574,
"step": 347
},
{
"epoch": 3.48,
"learning_rate": 2e-05,
"loss": 3708.3647,
"step": 348
},
{
"epoch": 3.49,
"learning_rate": 2e-05,
"loss": 3739.094,
"step": 349
},
{
"epoch": 3.5,
"learning_rate": 2e-05,
"loss": 3696.3413,
"step": 350
},
{
"epoch": 3.51,
"learning_rate": 2e-05,
"loss": 4573.146,
"step": 351
},
{
"epoch": 3.52,
"learning_rate": 2e-05,
"loss": 4276.0264,
"step": 352
},
{
"epoch": 3.53,
"learning_rate": 2e-05,
"loss": 3745.4736,
"step": 353
},
{
"epoch": 3.54,
"learning_rate": 2e-05,
"loss": 3880.3018,
"step": 354
},
{
"epoch": 3.55,
"learning_rate": 2e-05,
"loss": 3983.3145,
"step": 355
},
{
"epoch": 3.56,
"learning_rate": 2e-05,
"loss": 3436.9373,
"step": 356
},
{
"epoch": 3.57,
"learning_rate": 2e-05,
"loss": 4162.9082,
"step": 357
},
{
"epoch": 3.58,
"learning_rate": 2e-05,
"loss": 3945.7412,
"step": 358
},
{
"epoch": 3.59,
"learning_rate": 2e-05,
"loss": 3507.0898,
"step": 359
},
{
"epoch": 3.6,
"learning_rate": 2e-05,
"loss": 3669.127,
"step": 360
},
{
"epoch": 3.6,
"eval_loss": 4000.498046875,
"eval_runtime": 152.4801,
"eval_samples_per_second": 10.467,
"eval_steps_per_second": 0.656,
"step": 360
},
{
"epoch": 3.61,
"learning_rate": 2e-05,
"loss": 4219.0439,
"step": 361
},
{
"epoch": 3.62,
"learning_rate": 2e-05,
"loss": 3937.8618,
"step": 362
},
{
"epoch": 3.63,
"learning_rate": 2e-05,
"loss": 3954.0127,
"step": 363
},
{
"epoch": 3.64,
"learning_rate": 2e-05,
"loss": 3861.4602,
"step": 364
},
{
"epoch": 3.65,
"learning_rate": 2e-05,
"loss": 3659.9412,
"step": 365
},
{
"epoch": 3.66,
"learning_rate": 2e-05,
"loss": 3923.0183,
"step": 366
},
{
"epoch": 3.67,
"learning_rate": 2e-05,
"loss": 3869.6533,
"step": 367
},
{
"epoch": 3.68,
"learning_rate": 2e-05,
"loss": 4066.0229,
"step": 368
},
{
"epoch": 3.69,
"learning_rate": 2e-05,
"loss": 4978.6533,
"step": 369
},
{
"epoch": 3.7,
"learning_rate": 2e-05,
"loss": 3747.0063,
"step": 370
},
{
"epoch": 3.71,
"learning_rate": 2e-05,
"loss": 3962.3235,
"step": 371
},
{
"epoch": 3.72,
"learning_rate": 2e-05,
"loss": 4163.8613,
"step": 372
},
{
"epoch": 3.73,
"learning_rate": 2e-05,
"loss": 4133.5288,
"step": 373
},
{
"epoch": 3.74,
"learning_rate": 2e-05,
"loss": 4478.0767,
"step": 374
},
{
"epoch": 3.75,
"learning_rate": 2e-05,
"loss": 3869.3635,
"step": 375
},
{
"epoch": 3.76,
"learning_rate": 2e-05,
"loss": 4138.4487,
"step": 376
},
{
"epoch": 3.77,
"learning_rate": 2e-05,
"loss": 4073.0718,
"step": 377
},
{
"epoch": 3.78,
"learning_rate": 2e-05,
"loss": 4123.4678,
"step": 378
},
{
"epoch": 3.79,
"learning_rate": 2e-05,
"loss": 4103.5117,
"step": 379
},
{
"epoch": 3.8,
"learning_rate": 2e-05,
"loss": 3554.8037,
"step": 380
},
{
"epoch": 3.81,
"learning_rate": 2e-05,
"loss": 4128.582,
"step": 381
},
{
"epoch": 3.82,
"learning_rate": 2e-05,
"loss": 3922.896,
"step": 382
},
{
"epoch": 3.83,
"learning_rate": 2e-05,
"loss": 3866.6106,
"step": 383
},
{
"epoch": 3.84,
"learning_rate": 2e-05,
"loss": 3856.5063,
"step": 384
},
{
"epoch": 3.85,
"learning_rate": 2e-05,
"loss": 3534.6477,
"step": 385
},
{
"epoch": 3.86,
"learning_rate": 2e-05,
"loss": 3878.1855,
"step": 386
},
{
"epoch": 3.87,
"learning_rate": 2e-05,
"loss": 3660.9155,
"step": 387
},
{
"epoch": 3.88,
"learning_rate": 2e-05,
"loss": 3628.4751,
"step": 388
},
{
"epoch": 3.89,
"learning_rate": 2e-05,
"loss": 3467.9219,
"step": 389
},
{
"epoch": 3.9,
"learning_rate": 2e-05,
"loss": 3975.7178,
"step": 390
},
{
"epoch": 3.91,
"learning_rate": 2e-05,
"loss": 3414.8813,
"step": 391
},
{
"epoch": 3.92,
"learning_rate": 2e-05,
"loss": 4234.2061,
"step": 392
},
{
"epoch": 3.93,
"learning_rate": 2e-05,
"loss": 4341.897,
"step": 393
},
{
"epoch": 3.94,
"learning_rate": 2e-05,
"loss": 4035.8455,
"step": 394
},
{
"epoch": 3.95,
"learning_rate": 2e-05,
"loss": 3405.0151,
"step": 395
},
{
"epoch": 3.96,
"learning_rate": 2e-05,
"loss": 3455.5244,
"step": 396
},
{
"epoch": 3.97,
"learning_rate": 2e-05,
"loss": 3791.8367,
"step": 397
},
{
"epoch": 3.98,
"learning_rate": 2e-05,
"loss": 3939.1094,
"step": 398
},
{
"epoch": 3.99,
"learning_rate": 2e-05,
"loss": 4370.543,
"step": 399
},
{
"epoch": 4.0,
"learning_rate": 2e-05,
"loss": 4121.397,
"step": 400
},
{
"epoch": 4.0,
"eval_loss": 3884.240234375,
"eval_runtime": 152.4512,
"eval_samples_per_second": 10.469,
"eval_steps_per_second": 0.656,
"step": 400
}
],
"logging_steps": 1.0,
"max_steps": 400,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 40,
"total_flos": 1.624099600371548e+17,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}