Training in progress, step 441, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9393586a63805c19d385fe7296d24928f62baf6141ed304c7ed9a5297ff0014d
 size 83945296
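Each checkpoint binary in this commit is stored as a Git LFS pointer like the one above: a spec version line, an oid (the SHA-256 digest of the actual blob), and the blob size in bytes. A minimal sketch of checking a downloaded blob against its pointer; the local paths are hypothetical and not part of this commit:

import hashlib
import os

def parse_pointer(path):
    # A pointer file is one "key value" pair per line: version, oid, size.
    with open(path) as f:
        return dict(line.strip().split(" ", 1) for line in f if line.strip())

def verify(pointer_path, blob_path):
    # Recompute the blob's SHA-256 and compare against the pointer's oid and size.
    fields = parse_pointer(pointer_path)
    sha = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
    assert fields["oid"] == "sha256:" + sha.hexdigest(), "oid mismatch"
    assert int(fields["size"]) == os.path.getsize(blob_path), "size mismatch"

# verify("adapter_model.safetensors.pointer", "adapter_model.safetensors")  # hypothetical paths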
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8ba52fe056b1cd259fd539d694eb49c8e930675ab22a10503750558d43e277a5
 size 43123028
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:385950a51de83989c0b38163a7395eaf736599370e897d134f1fa06bf7e56b2d
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:9dd00bada8f46a77349713385549ad2b064ad4a51b495aa3ae2b1cc0a00582d3
 size 1064
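Alongside the adapter weights, optimizer.pt, rng_state.pth, and scheduler.pt carry the optimizer moments, RNG snapshot, and LR-scheduler state that let training resume exactly at step 441; with the transformers Trainer that is typically trainer.train(resume_from_checkpoint="last-checkpoint"). A minimal sketch of inspecting the three torch pickles directly, assuming only the directory name from this file listing (the printed keys are illustrative):

import torch

ckpt = "last-checkpoint"  # directory name taken from this commit's file listing

# Plain torch pickles saved by Trainer; weights_only=False is required on newer
# torch for these non-tensor state dicts (trusted checkpoint assumed).
optimizer_state = torch.load(f"{ckpt}/optimizer.pt", map_location="cpu", weights_only=False)
scheduler_state = torch.load(f"{ckpt}/scheduler.pt", map_location="cpu", weights_only=False)
rng_state = torch.load(f"{ckpt}/rng_state.pth", map_location="cpu", weights_only=False)

print(optimizer_state.keys())  # e.g. dict_keys(['state', 'param_groups'])
print(scheduler_state)         # scheduler state_dict, e.g. last_epoch / _last_lr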
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.7525597269624573,
   "eval_steps": 147,
-  "global_step":
+  "global_step": 441,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2089,6 +2089,1043 @@
       "eval_samples_per_second": 32.006,
       "eval_steps_per_second": 16.068,
       "step": 294
+    },
+    {"epoch": 0.5034129692832765, "grad_norm": 1.308112621307373, "learning_rate": 0.00010163617316264869, "loss": 0.3276, "step": 295},
+    {"epoch": 0.5051194539249146, "grad_norm": 1.3719441890716553, "learning_rate": 0.00010109080914941824, "loss": 0.333, "step": 296},
+    {"epoch": 0.5068259385665529, "grad_norm": 1.4522560834884644, "learning_rate": 0.00010054541268710138, "loss": 0.4089, "step": 297},
+    {"epoch": 0.5085324232081911, "grad_norm": 1.53480863571167, "learning_rate": 0.0001, "loss": 0.3143, "step": 298},
+    {"epoch": 0.5102389078498294, "grad_norm": 1.009020209312439, "learning_rate": 9.945458731289862e-05, "loss": 0.2903, "step": 299},
+    {"epoch": 0.5119453924914675, "grad_norm": 1.199953317642212, "learning_rate": 9.890919085058178e-05, "loss": 0.3447, "step": 300},
+    {"epoch": 0.5136518771331058, "grad_norm": 1.3703932762145996, "learning_rate": 9.836382683735132e-05, "loss": 0.3773, "step": 301},
+    {"epoch": 0.515358361774744, "grad_norm": 1.3355997800827026, "learning_rate": 9.78185114965439e-05, "loss": 0.3451, "step": 302},
+    {"epoch": 0.5170648464163823, "grad_norm": 1.148918628692627, "learning_rate": 9.727326105004817e-05, "loss": 0.3337, "step": 303},
+    {"epoch": 0.5187713310580204, "grad_norm": 1.3731484413146973, "learning_rate": 9.67280917178224e-05, "loss": 0.3493, "step": 304},
+    {"epoch": 0.5204778156996587, "grad_norm": 1.32598078250885, "learning_rate": 9.618301971741184e-05, "loss": 0.3609, "step": 305},
+    {"epoch": 0.5221843003412969, "grad_norm": 1.6155914068222046, "learning_rate": 9.563806126346642e-05, "loss": 0.3587, "step": 306},
+    {"epoch": 0.5238907849829352, "grad_norm": 1.488803744316101, "learning_rate": 9.509323256725821e-05, "loss": 0.5027, "step": 307},
+    {"epoch": 0.5255972696245734, "grad_norm": 1.3675568103790283, "learning_rate": 9.454854983619936e-05, "loss": 0.392, "step": 308},
+    {"epoch": 0.5273037542662116, "grad_norm": 1.1739741563796997, "learning_rate": 9.400402927335992e-05, "loss": 0.2947, "step": 309},
+    {"epoch": 0.5290102389078498, "grad_norm": 1.101880669593811, "learning_rate": 9.345968707698569e-05, "loss": 0.3134, "step": 310},
+    {"epoch": 0.5307167235494881, "grad_norm": 0.9541621804237366, "learning_rate": 9.29155394400166e-05, "loss": 0.2375, "step": 311},
+    {"epoch": 0.5324232081911263, "grad_norm": 0.9731763601303101, "learning_rate": 9.237160254960476e-05, "loss": 0.2116, "step": 312},
+    {"epoch": 0.5341296928327645, "grad_norm": 1.0994391441345215, "learning_rate": 9.182789258663321e-05, "loss": 0.2385, "step": 313},
+    {"epoch": 0.5358361774744027, "grad_norm": 1.0539134740829468, "learning_rate": 9.128442572523417e-05, "loss": 0.286, "step": 314},
+    {"epoch": 0.537542662116041, "grad_norm": 1.466916799545288, "learning_rate": 9.074121813230847e-05, "loss": 0.5136, "step": 315},
+    {"epoch": 0.5392491467576792, "grad_norm": 1.0829576253890991, "learning_rate": 9.019828596704394e-05, "loss": 0.2998, "step": 316},
+    {"epoch": 0.5409556313993175, "grad_norm": 1.1193914413452148, "learning_rate": 8.965564538043535e-05, "loss": 0.2675, "step": 317},
+    {"epoch": 0.5426621160409556, "grad_norm": 1.0619031190872192, "learning_rate": 8.911331251480357e-05, "loss": 0.2498, "step": 318},
+    {"epoch": 0.5443686006825939, "grad_norm": 1.3020758628845215, "learning_rate": 8.857130350331535e-05, "loss": 0.3403, "step": 319},
+    {"epoch": 0.5460750853242321, "grad_norm": 1.603843092918396, "learning_rate": 8.802963446950377e-05, "loss": 0.3578, "step": 320},
+    {"epoch": 0.5477815699658704, "grad_norm": 1.146308422088623, "learning_rate": 8.74883215267881e-05, "loss": 0.2263, "step": 321},
+    {"epoch": 0.5494880546075085, "grad_norm": 1.1071500778198242, "learning_rate": 8.694738077799488e-05, "loss": 0.2474, "step": 322},
+    {"epoch": 0.5511945392491467, "grad_norm": 1.529248833656311, "learning_rate": 8.64068283148786e-05, "loss": 0.4043, "step": 323},
+    {"epoch": 0.552901023890785, "grad_norm": 1.2711862325668335, "learning_rate": 8.586668021764329e-05, "loss": 0.2838, "step": 324},
+    {"epoch": 0.5546075085324232, "grad_norm": 1.1665675640106201, "learning_rate": 8.532695255446383e-05, "loss": 0.321, "step": 325},
+    {"epoch": 0.5563139931740614, "grad_norm": 1.5756592750549316, "learning_rate": 8.478766138100834e-05, "loss": 0.3687, "step": 326},
+    {"epoch": 0.5580204778156996, "grad_norm": 1.5955936908721924, "learning_rate": 8.424882273996024e-05, "loss": 0.3988, "step": 327},
+    {"epoch": 0.5597269624573379, "grad_norm": 1.466103434562683, "learning_rate": 8.371045266054114e-05, "loss": 0.379, "step": 328},
+    {"epoch": 0.5614334470989761, "grad_norm": 1.560831904411316, "learning_rate": 8.317256715803407e-05, "loss": 0.4634, "step": 329},
+    {"epoch": 0.5631399317406144, "grad_norm": 1.5819355249404907, "learning_rate": 8.263518223330697e-05, "loss": 0.5335, "step": 330},
+    {"epoch": 0.5648464163822525, "grad_norm": 1.350630760192871, "learning_rate": 8.209831387233676e-05, "loss": 0.2092, "step": 331},
+    {"epoch": 0.5665529010238908, "grad_norm": 1.273508906364441, "learning_rate": 8.156197804573366e-05, "loss": 0.3387, "step": 332},
+    {"epoch": 0.568259385665529, "grad_norm": 1.3282191753387451, "learning_rate": 8.102619070826639e-05, "loss": 0.3208, "step": 333},
+    {"epoch": 0.5699658703071673, "grad_norm": 1.216187596321106, "learning_rate": 8.049096779838719e-05, "loss": 0.2695, "step": 334},
+    {"epoch": 0.5716723549488054, "grad_norm": 1.2634128332138062, "learning_rate": 7.995632523775795e-05, "loss": 0.3636, "step": 335},
+    {"epoch": 0.5733788395904437, "grad_norm": 1.1220873594284058, "learning_rate": 7.942227893077652e-05, "loss": 0.213, "step": 336},
+    {"epoch": 0.5750853242320819, "grad_norm": 1.2302778959274292, "learning_rate": 7.888884476410348e-05, "loss": 0.3296, "step": 337},
+    {"epoch": 0.5767918088737202, "grad_norm": 1.2520626783370972, "learning_rate": 7.835603860618972e-05, "loss": 0.3598, "step": 338},
+    {"epoch": 0.5784982935153583, "grad_norm": 0.9339261651039124, "learning_rate": 7.782387630680421e-05, "loss": 0.1855, "step": 339},
+    {"epoch": 0.5802047781569966, "grad_norm": 1.3704556226730347, "learning_rate": 7.729237369656269e-05, "loss": 0.2683, "step": 340},
+    {"epoch": 0.5819112627986348, "grad_norm": 0.9682012796401978, "learning_rate": 7.676154658645656e-05, "loss": 0.2178, "step": 341},
+    {"epoch": 0.5836177474402731, "grad_norm": 1.0641804933547974, "learning_rate": 7.623141076738271e-05, "loss": 0.2099, "step": 342},
+    {"epoch": 0.5853242320819113, "grad_norm": 1.3178662061691284, "learning_rate": 7.570198200967362e-05, "loss": 0.2498, "step": 343},
+    {"epoch": 0.5870307167235495, "grad_norm": 1.4389296770095825, "learning_rate": 7.517327606262836e-05, "loss": 0.3786, "step": 344},
+    {"epoch": 0.5887372013651877, "grad_norm": 1.005115270614624, "learning_rate": 7.464530865404407e-05, "loss": 0.2031, "step": 345},
+    {"epoch": 0.590443686006826, "grad_norm": 1.2811903953552246, "learning_rate": 7.411809548974792e-05, "loss": 0.2642, "step": 346},
+    {"epoch": 0.5921501706484642, "grad_norm": 1.5106682777404785, "learning_rate": 7.359165225313019e-05, "loss": 0.3801, "step": 347},
+    {"epoch": 0.5938566552901023, "grad_norm": 1.4023702144622803, "learning_rate": 7.30659946046774e-05, "loss": 0.2326, "step": 348},
+    {"epoch": 0.5955631399317406, "grad_norm": 1.0239217281341553, "learning_rate": 7.25411381815068e-05, "loss": 0.2779, "step": 349},
+    {"epoch": 0.5972696245733788, "grad_norm": 1.4149168729782104, "learning_rate": 7.20170985969008e-05, "loss": 0.4009, "step": 350},
+    {"epoch": 0.5989761092150171, "grad_norm": 1.4914072751998901, "learning_rate": 7.149389143984295e-05, "loss": 0.375, "step": 351},
+    {"epoch": 0.6006825938566553, "grad_norm": 1.46074640750885, "learning_rate": 7.097153227455379e-05, "loss": 0.3778, "step": 352},
+    {"epoch": 0.6023890784982935, "grad_norm": 1.3193451166152954, "learning_rate": 7.045003664002809e-05, "loss": 0.3095, "step": 353},
+    {"epoch": 0.6040955631399317, "grad_norm": 1.3252489566802979, "learning_rate": 6.992942004957271e-05, "loss": 0.3323, "step": 354},
+    {"epoch": 0.60580204778157, "grad_norm": 1.5391185283660889, "learning_rate": 6.940969799034465e-05, "loss": 0.229, "step": 355},
+    {"epoch": 0.6075085324232082, "grad_norm": 0.9688441753387451, "learning_rate": 6.889088592289093e-05, "loss": 0.1998, "step": 356},
+    {"epoch": 0.6092150170648464, "grad_norm": 1.4932124614715576, "learning_rate": 6.837299928068817e-05, "loss": 0.4337, "step": 357},
+    {"epoch": 0.6109215017064846, "grad_norm": 1.6151130199432373, "learning_rate": 6.785605346968386e-05, "loss": 0.3904, "step": 358},
+    {"epoch": 0.6126279863481229, "grad_norm": 1.1556451320648193, "learning_rate": 6.73400638678378e-05, "loss": 0.3073, "step": 359},
+    {"epoch": 0.6143344709897611, "grad_norm": 1.1989551782608032, "learning_rate": 6.682504582466482e-05, "loss": 0.2874, "step": 360},
+    {"epoch": 0.6160409556313993, "grad_norm": 1.3045040369033813, "learning_rate": 6.6311014660778e-05, "loss": 0.3427, "step": 361},
+    {"epoch": 0.6177474402730375, "grad_norm": 1.3451586961746216, "learning_rate": 6.579798566743314e-05, "loss": 0.3091, "step": 362},
+    {"epoch": 0.6194539249146758, "grad_norm": 1.0896745920181274, "learning_rate": 6.528597410607364e-05, "loss": 0.2665, "step": 363},
+    {"epoch": 0.621160409556314, "grad_norm": 1.5394841432571411, "learning_rate": 6.477499520787665e-05, "loss": 0.3172, "step": 364},
+    {"epoch": 0.6228668941979523, "grad_norm": 1.2589391469955444, "learning_rate": 6.42650641733e-05, "loss": 0.2881, "step": 365},
+    {"epoch": 0.6245733788395904, "grad_norm": 1.2845879793167114, "learning_rate": 6.375619617162985e-05, "loss": 0.3921, "step": 366},
+    {"epoch": 0.6262798634812287, "grad_norm": 1.368632435798645, "learning_rate": 6.324840634052967e-05, "loss": 0.4102, "step": 367},
+    {"epoch": 0.6279863481228669, "grad_norm": 1.332789421081543, "learning_rate": 6.27417097855897e-05, "loss": 0.3673, "step": 368},
+    {"epoch": 0.6296928327645052, "grad_norm": 1.1067997217178345, "learning_rate": 6.223612157987786e-05, "loss": 0.2202, "step": 369},
+    {"epoch": 0.6313993174061433, "grad_norm": 1.0624278783798218, "learning_rate": 6.173165676349103e-05, "loss": 0.2821, "step": 370},
+    {"epoch": 0.6331058020477816, "grad_norm": 1.2008612155914307, "learning_rate": 6.122833034310793e-05, "loss": 0.2736, "step": 371},
+    {"epoch": 0.6348122866894198, "grad_norm": 1.2561343908309937, "learning_rate": 6.0726157291542605e-05, "loss": 0.3426, "step": 372},
+    {"epoch": 0.636518771331058, "grad_norm": 1.1441118717193604, "learning_rate": 6.02251525472989e-05, "loss": 0.2727, "step": 373},
+    {"epoch": 0.6382252559726962, "grad_norm": 1.3579143285751343, "learning_rate": 5.9725331014126294e-05, "loss": 0.266, "step": 374},
+    {"epoch": 0.6399317406143344, "grad_norm": 1.423100233078003, "learning_rate": 5.922670756057633e-05, "loss": 0.336, "step": 375},
+    {"epoch": 0.6416382252559727, "grad_norm": 1.762887954711914, "learning_rate": 5.872929701956054e-05, "loss": 0.4905, "step": 376},
+    {"epoch": 0.643344709897611, "grad_norm": 0.817060112953186, "learning_rate": 5.8233114187908935e-05, "loss": 0.1708, "step": 377},
+    {"epoch": 0.6450511945392492, "grad_norm": 1.3539371490478516, "learning_rate": 5.773817382593008e-05, "loss": 0.4343, "step": 378},
+    {"epoch": 0.6467576791808873, "grad_norm": 1.2161221504211426, "learning_rate": 5.7244490656971815e-05, "loss": 0.4114, "step": 379},
+    {"epoch": 0.6484641638225256, "grad_norm": 1.4145348072052002, "learning_rate": 5.675207936698337e-05, "loss": 0.3869, "step": 380},
+    {"epoch": 0.6501706484641638, "grad_norm": 1.0226906538009644, "learning_rate": 5.6260954604078585e-05, "loss": 0.2594, "step": 381},
+    {"epoch": 0.6518771331058021, "grad_norm": 1.0303524732589722, "learning_rate": 5.577113097809989e-05, "loss": 0.2767, "step": 382},
+    {"epoch": 0.6535836177474402, "grad_norm": 1.2397347688674927, "learning_rate": 5.528262306018395e-05, "loss": 0.3982, "step": 383},
+    {"epoch": 0.6552901023890785, "grad_norm": 1.431638479232788, "learning_rate": 5.4795445382328037e-05, "loss": 0.3105, "step": 384},
+    {"epoch": 0.6569965870307167, "grad_norm": 1.0612486600875854, "learning_rate": 5.4309612436957937e-05, "loss": 0.2136, "step": 385},
+    {"epoch": 0.658703071672355, "grad_norm": 1.449487566947937, "learning_rate": 5.382513867649663e-05, "loss": 0.3583, "step": 386},
+    {"epoch": 0.6604095563139932, "grad_norm": 1.537110447883606, "learning_rate": 5.3342038512934424e-05, "loss": 0.348, "step": 387},
+    {"epoch": 0.6621160409556314, "grad_norm": 1.3030385971069336, "learning_rate": 5.286032631740023e-05, "loss": 0.2277, "step": 388},
+    {"epoch": 0.6638225255972696, "grad_norm": 1.4260382652282715, "learning_rate": 5.238001641973422e-05, "loss": 0.3842, "step": 389},
+    {"epoch": 0.6655290102389079, "grad_norm": 1.1528912782669067, "learning_rate": 5.190112310806126e-05, "loss": 0.2869, "step": 390},
+    {"epoch": 0.6672354948805461, "grad_norm": 1.3670388460159302, "learning_rate": 5.142366062836599e-05, "loss": 0.3733, "step": 391},
+    {"epoch": 0.6689419795221843, "grad_norm": 1.6885592937469482, "learning_rate": 5.09476431840692e-05, "loss": 0.4482, "step": 392},
+    {"epoch": 0.6706484641638225, "grad_norm": 1.2889097929000854, "learning_rate": 5.047308493560506e-05, "loss": 0.3482, "step": 393},
+    {"epoch": 0.6723549488054608, "grad_norm": 1.2984974384307861, "learning_rate": 5.000000000000002e-05, "loss": 0.3775, "step": 394},
+    {"epoch": 0.674061433447099, "grad_norm": 1.285125732421875, "learning_rate": 4.952840245045278e-05, "loss": 0.3038, "step": 395},
+    {"epoch": 0.6757679180887372, "grad_norm": 1.1072163581848145, "learning_rate": 4.9058306315915826e-05, "loss": 0.29, "step": 396},
+    {"epoch": 0.6774744027303754, "grad_norm": 1.0346839427947998, "learning_rate": 4.8589725580677835e-05, "loss": 0.2204, "step": 397},
+    {"epoch": 0.6791808873720137, "grad_norm": 0.9718276858329773, "learning_rate": 4.8122674183947836e-05, "loss": 0.1918, "step": 398},
+    {"epoch": 0.6808873720136519, "grad_norm": 1.186357855796814, "learning_rate": 4.7657166019440614e-05, "loss": 0.3621, "step": 399},
+    {"epoch": 0.6825938566552902, "grad_norm": 0.8597872853279114, "learning_rate": 4.7193214934963206e-05, "loss": 0.1928, "step": 400},
+    {"epoch": 0.6843003412969283, "grad_norm": 1.0521053075790405, "learning_rate": 4.6730834732003104e-05, "loss": 0.2909, "step": 401},
+    {"epoch": 0.6860068259385665, "grad_norm": 0.9495362043380737, "learning_rate": 4.6270039165317605e-05, "loss": 0.2419, "step": 402},
+    {"epoch": 0.6877133105802048, "grad_norm": 1.3452738523483276, "learning_rate": 4.5810841942524864e-05, "loss": 0.4448, "step": 403},
+    {"epoch": 0.689419795221843, "grad_norm": 1.2064647674560547, "learning_rate": 4.535325672369567e-05, "loss": 0.3684, "step": 404},
+    {"epoch": 0.6911262798634812, "grad_norm": 1.2378430366516113, "learning_rate": 4.4897297120947624e-05, "loss": 0.3893, "step": 405},
+    {"epoch": 0.6928327645051194, "grad_norm": 1.1042660474777222, "learning_rate": 4.444297669803981e-05, "loss": 0.2432, "step": 406},
+    {"epoch": 0.6945392491467577, "grad_norm": 1.3131179809570312, "learning_rate": 4.399030896996945e-05, "loss": 0.3569, "step": 407},
+    {"epoch": 0.6962457337883959, "grad_norm": 1.2609524726867676, "learning_rate": 4.353930740256996e-05, "loss": 0.3544, "step": 408},
+    {"epoch": 0.6979522184300341, "grad_norm": 1.3554162979125977, "learning_rate": 4.308998541211015e-05, "loss": 0.2736, "step": 409},
+    {"epoch": 0.6996587030716723, "grad_norm": 0.7854421138763428, "learning_rate": 4.264235636489542e-05, "loss": 0.1621, "step": 410},
+    {"epoch": 0.7013651877133106, "grad_norm": 0.853469967842102, "learning_rate": 4.219643357686967e-05, "loss": 0.1664, "step": 411},
+    {"epoch": 0.7030716723549488, "grad_norm": 1.3894639015197754, "learning_rate": 4.17522303132198e-05, "loss": 0.2859, "step": 412},
+    {"epoch": 0.7047781569965871, "grad_norm": 1.0722367763519287, "learning_rate": 4.1309759787980565e-05, "loss": 0.2169, "step": 413},
+    {"epoch": 0.7064846416382252, "grad_norm": 1.4003040790557861, "learning_rate": 4.086903516364179e-05, "loss": 0.3722, "step": 414},
+    {"epoch": 0.7081911262798635, "grad_norm": 1.3530091047286987, "learning_rate": 4.0430069550756665e-05, "loss": 0.3906, "step": 415},
+    {"epoch": 0.7098976109215017, "grad_norm": 1.2177244424819946, "learning_rate": 3.999287600755192e-05, "loss": 0.3143, "step": 416},
+    {"epoch": 0.71160409556314, "grad_norm": 1.3488582372665405, "learning_rate": 3.9557467539539115e-05, "loss": 0.3465, "step": 417},
+    {"epoch": 0.7133105802047781, "grad_norm": 1.2977614402770996, "learning_rate": 3.9123857099127936e-05, "loss": 0.26, "step": 418},
+    {"epoch": 0.7150170648464164, "grad_norm": 1.2360209226608276, "learning_rate": 3.8692057585240905e-05, "loss": 0.3121, "step": 419},
+    {"epoch": 0.7167235494880546, "grad_norm": 1.3001514673233032, "learning_rate": 3.826208184292952e-05, "loss": 0.2945, "step": 420},
+    {"epoch": 0.7184300341296929, "grad_norm": 1.186898946762085, "learning_rate": 3.783394266299228e-05, "loss": 0.2932, "step": 421},
+    {"epoch": 0.7201365187713311, "grad_norm": 0.8554227352142334, "learning_rate": 3.7407652781594095e-05, "loss": 0.2003, "step": 422},
+    {"epoch": 0.7218430034129693, "grad_norm": 1.1296398639678955, "learning_rate": 3.698322487988755e-05, "loss": 0.2598, "step": 423},
+    {"epoch": 0.7235494880546075, "grad_norm": 1.6475400924682617, "learning_rate": 3.6560671583635467e-05, "loss": 0.3723, "step": 424},
+    {"epoch": 0.7252559726962458, "grad_norm": 1.3780394792556763, "learning_rate": 3.614000546283547e-05, "loss": 0.3928, "step": 425},
+    {"epoch": 0.726962457337884, "grad_norm": 0.9477567076683044, "learning_rate": 3.5721239031346066e-05, "loss": 0.192, "step": 426},
+    {"epoch": 0.7286689419795221, "grad_norm": 1.4966415166854858, "learning_rate": 3.530438474651428e-05, "loss": 0.3359, "step": 427},
+    {"epoch": 0.7303754266211604, "grad_norm": 1.2480156421661377, "learning_rate": 3.4889455008805106e-05, "loss": 0.342, "step": 428},
+    {"epoch": 0.7320819112627986, "grad_norm": 1.4273872375488281, "learning_rate": 3.447646216143268e-05, "loss": 0.4465, "step": 429},
+    {"epoch": 0.7337883959044369, "grad_norm": 0.969070553779602, "learning_rate": 3.406541848999312e-05, "loss": 0.1959, "step": 430},
+    {"epoch": 0.735494880546075, "grad_norm": 1.2739261388778687, "learning_rate": 3.365633622209891e-05, "loss": 0.3226, "step": 431},
+    {"epoch": 0.7372013651877133, "grad_norm": 1.2719146013259888, "learning_rate": 3.324922752701528e-05, "loss": 0.2838, "step": 432},
+    {"epoch": 0.7389078498293515, "grad_norm": 1.5424641370773315, "learning_rate": 3.2844104515298155e-05, "loss": 0.2901, "step": 433},
+    {"epoch": 0.7406143344709898, "grad_norm": 1.3164613246917725, "learning_rate": 3.244097923843398e-05, "loss": 0.3546, "step": 434},
+    {"epoch": 0.742320819112628, "grad_norm": 1.4883449077606201, "learning_rate": 3.2039863688481055e-05, "loss": 0.3153, "step": 435},
+    {"epoch": 0.7440273037542662, "grad_norm": 1.155255913734436, "learning_rate": 3.164076979771287e-05, "loss": 0.1993, "step": 436},
+    {"epoch": 0.7457337883959044, "grad_norm": 1.0166136026382446, "learning_rate": 3.1243709438263255e-05, "loss": 0.2152, "step": 437},
+    {"epoch": 0.7474402730375427, "grad_norm": 1.1781895160675049, "learning_rate": 3.0848694421773075e-05, "loss": 0.2947, "step": 438},
+    {"epoch": 0.7491467576791809, "grad_norm": 1.4823838472366333, "learning_rate": 3.0455736499038845e-05, "loss": 0.3206, "step": 439},
+    {"epoch": 0.7508532423208191, "grad_norm": 1.2390568256378174, "learning_rate": 3.0064847359663284e-05, "loss": 0.2962, "step": 440},
+    {"epoch": 0.7525597269624573, "grad_norm": 1.1224266290664673, "learning_rate": 2.9676038631707593e-05, "loss": 0.2043, "step": 441},
+    {"epoch": 0.7525597269624573, "eval_loss": 0.27935898303985596, "eval_runtime": 7.7116, "eval_samples_per_second": 32.03, "eval_steps_per_second": 16.08, "step": 441
     }
   ],
   "logging_steps": 1,
@@ -2108,7 +3145,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
+  "total_flos": 8.156578334323507e+16,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null
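Every record added to log_history above follows one of two shapes: per-step training records with loss, grad_norm, and learning_rate, and periodic eval records keyed by eval_loss. A minimal sketch of recovering the loss curves from the file, assuming only the trainer_state.json layout shown in this diff:

import json

with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Training records carry "loss"; eval records carry "eval_loss" instead.
history = state["log_history"]
train_curve = [(r["step"], r["loss"]) for r in history if "loss" in r]
eval_curve = [(r["step"], r["eval_loss"]) for r in history if "eval_loss" in r]

print("resumable at step", state["global_step"])  # 441
print("last train loss:", train_curve[-1])        # (441, 0.2043)
print("last eval loss:", eval_curve[-1])          # (441, 0.27935898303985596)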