Training in progress, epoch 2, checkpoint

- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +2251 -3
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8898176c9ee551ed32dd529cc843abd1840164f3645656fdebe1c239a3d87ae1
 size 295765866
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3895045b743f09572b379a470906b3b61fec60b0f952eeaa286c3266a3af0c82
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:dd3e72f9db43bec86b839573da68f94e1b5aee7b8c1a2d328ad343d08e7ad7dc
 size 1064
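The three .pt entries above are Git LFS pointer files: the commit records only a version line, a sha256 oid, and a byte size, while the binary payload (here a ~295 MB optimizer state) lives in LFS storage. A minimal sketch of how such a pointer can be parsed; the helper name is illustrative, not part of this repo:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Split the 'key value' lines of a Git LFS pointer into a dict."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# The pointer content is exactly what the optimizer.pt diff above shows.
pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:8898176c9ee551ed32dd529cc843abd1840164f3645656fdebe1c239a3d87ae1\n"
    "size 295765866\n"
)
print(pointer["oid"], pointer["size"])
```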
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0
+  "epoch": 2.0,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 639,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -2248,6 +2248,2254 @@
       "eval_samples_per_second": 35.534,
       "eval_steps_per_second": 8.9,
       "step": 319
     }
   ],
   "logging_steps": 1,
@@ -2267,7 +4515,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 4,
   "trial_name": null,
   "trial_params": null
@@ -2248,6 +2248,2254 @@
       "eval_samples_per_second": 35.534,
       "eval_steps_per_second": 8.9,
       "step": 319
+    },
+    { "epoch": 1.001564945226917, "grad_norm": NaN, "learning_rate": 7.580954190982213e-05, "loss": 0.0, "step": 320 },
+    { "epoch": 1.0046948356807512, "grad_norm": NaN, "learning_rate": 7.566733620348596e-05, "loss": 0.0, "step": 321 },
+    { "epoch": 1.0078247261345852, "grad_norm": NaN, "learning_rate": 7.552484802203858e-05, "loss": 0.0, "step": 322 },
+    { "epoch": 1.0109546165884193, "grad_norm": NaN, "learning_rate": 7.538207893359611e-05, "loss": 0.0, "step": 323 },
+    { "epoch": 1.0140845070422535, "grad_norm": NaN, "learning_rate": 7.523903050936621e-05, "loss": 0.0, "step": 324 },
+    { "epoch": 1.0172143974960877, "grad_norm": NaN, "learning_rate": 7.509570432363061e-05, "loss": 0.0, "step": 325 },
+    { "epoch": 1.0203442879499218, "grad_norm": NaN, "learning_rate": 7.495210195372795e-05, "loss": 0.0, "step": 326 },
+    { "epoch": 1.0234741784037558, "grad_norm": NaN, "learning_rate": 7.48082249800363e-05, "loss": 0.0, "step": 327 },
+    { "epoch": 1.02660406885759, "grad_norm": NaN, "learning_rate": 7.46640749859558e-05, "loss": 0.0, "step": 328 },
+    { "epoch": 1.029733959311424, "grad_norm": NaN, "learning_rate": 7.451965355789129e-05, "loss": 0.0, "step": 329 },
+    { "epoch": 1.0328638497652582, "grad_norm": NaN, "learning_rate": 7.437496228523473e-05, "loss": 0.0, "step": 330 },
+    { "epoch": 1.0359937402190924, "grad_norm": NaN, "learning_rate": 7.423000276034786e-05, "loss": 0.0, "step": 331 },
+    { "epoch": 1.0391236306729263, "grad_norm": NaN, "learning_rate": 7.408477657854458e-05, "loss": 0.0, "step": 332 },
+    { "epoch": 1.0422535211267605, "grad_norm": NaN, "learning_rate": 7.393928533807334e-05, "loss": 0.0, "step": 333 },
+    { "epoch": 1.0453834115805947, "grad_norm": NaN, "learning_rate": 7.379353064009976e-05, "loss": 0.0, "step": 334 },
+    { "epoch": 1.0485133020344288, "grad_norm": NaN, "learning_rate": 7.364751408868877e-05, "loss": 0.0, "step": 335 },
+    { "epoch": 1.051643192488263, "grad_norm": NaN, "learning_rate": 7.350123729078705e-05, "loss": 0.0, "step": 336 },
+    { "epoch": 1.054773082942097, "grad_norm": NaN, "learning_rate": 7.335470185620543e-05, "loss": 0.0, "step": 337 },
+    { "epoch": 1.057902973395931, "grad_norm": NaN, "learning_rate": 7.320790939760105e-05, "loss": 0.0, "step": 338 },
+    { "epoch": 1.0610328638497653, "grad_norm": NaN, "learning_rate": 7.306086153045965e-05, "loss": 0.0, "step": 339 },
+    { "epoch": 1.0641627543035994, "grad_norm": NaN, "learning_rate": 7.291355987307784e-05, "loss": 0.0, "step": 340 },
+    { "epoch": 1.0672926447574336, "grad_norm": NaN, "learning_rate": 7.276600604654522e-05, "loss": 0.0, "step": 341 },
+    { "epoch": 1.0704225352112675, "grad_norm": NaN, "learning_rate": 7.261820167472658e-05, "loss": 0.0, "step": 342 },
+    { "epoch": 1.0735524256651017, "grad_norm": NaN, "learning_rate": 7.247014838424404e-05, "loss": 0.0, "step": 343 },
+    { "epoch": 1.0766823161189358, "grad_norm": NaN, "learning_rate": 7.232184780445907e-05, "loss": 0.0, "step": 344 },
+    { "epoch": 1.07981220657277, "grad_norm": NaN, "learning_rate": 7.217330156745471e-05, "loss": 0.0, "step": 345 },
+    { "epoch": 1.0829420970266042, "grad_norm": NaN, "learning_rate": 7.202451130801742e-05, "loss": 0.0, "step": 346 },
+    { "epoch": 1.086071987480438, "grad_norm": NaN, "learning_rate": 7.187547866361925e-05, "loss": 0.0, "step": 347 },
+    { "epoch": 1.0892018779342723, "grad_norm": NaN, "learning_rate": 7.172620527439976e-05, "loss": 0.0, "step": 348 },
+    { "epoch": 1.0923317683881064, "grad_norm": NaN, "learning_rate": 7.157669278314789e-05, "loss": 0.0, "step": 349 },
+    { "epoch": 1.0954616588419406, "grad_norm": NaN, "learning_rate": 7.1426942835284e-05, "loss": 0.0, "step": 350 },
+    { "epoch": 1.0985915492957747, "grad_norm": NaN, "learning_rate": 7.127695707884176e-05, "loss": 0.0, "step": 351 },
+    { "epoch": 1.1017214397496087, "grad_norm": NaN, "learning_rate": 7.112673716444988e-05, "loss": 0.0, "step": 352 },
+    { "epoch": 1.1048513302034428, "grad_norm": NaN, "learning_rate": 7.09762847453141e-05, "loss": 0.0, "step": 353 },
+    { "epoch": 1.107981220657277, "grad_norm": NaN, "learning_rate": 7.082560147719893e-05, "loss": 0.0, "step": 354 },
+    { "epoch": 1.1111111111111112, "grad_norm": NaN, "learning_rate": 7.067468901840937e-05, "loss": 0.0, "step": 355 },
+    { "epoch": 1.1142410015649453, "grad_norm": NaN, "learning_rate": 7.052354902977278e-05, "loss": 0.0, "step": 356 },
+    { "epoch": 1.1173708920187793, "grad_norm": NaN, "learning_rate": 7.037218317462051e-05, "loss": 0.0, "step": 357 },
+    { "epoch": 1.1205007824726134, "grad_norm": NaN, "learning_rate": 7.022059311876962e-05, "loss": 0.0, "step": 358 },
+    { "epoch": 1.1236306729264476, "grad_norm": NaN, "learning_rate": 7.006878053050459e-05, "loss": 0.0, "step": 359 },
+    { "epoch": 1.1267605633802817, "grad_norm": NaN, "learning_rate": 6.991674708055885e-05, "loss": 0.0, "step": 360 },
+    { "epoch": 1.1298904538341157, "grad_norm": NaN, "learning_rate": 6.976449444209654e-05, "loss": 0.0, "step": 361 },
+    { "epoch": 1.1330203442879498, "grad_norm": NaN, "learning_rate": 6.961202429069396e-05, "loss": 0.0, "step": 362 },
+    { "epoch": 1.136150234741784, "grad_norm": NaN, "learning_rate": 6.945933830432127e-05, "loss": 0.0, "step": 363 },
+    { "epoch": 1.1392801251956182, "grad_norm": NaN, "learning_rate": 6.930643816332385e-05, "loss": 0.0, "step": 364 },
+    { "epoch": 1.1424100156494523, "grad_norm": NaN, "learning_rate": 6.915332555040396e-05, "loss": 0.0, "step": 365 },
+    { "epoch": 1.1455399061032865, "grad_norm": NaN, "learning_rate": 6.900000215060216e-05, "loss": 0.0, "step": 366 },
+    { "epoch": 1.1486697965571204, "grad_norm": NaN, "learning_rate": 6.884646965127876e-05, "loss": 0.0, "step": 367 },
+    { "epoch": 1.1517996870109546, "grad_norm": NaN, "learning_rate": 6.869272974209525e-05, "loss": 0.0, "step": 368 },
+    { "epoch": 1.1549295774647887, "grad_norm": NaN, "learning_rate": 6.853878411499574e-05, "loss": 0.0, "step": 369 },
+    { "epoch": 1.158059467918623, "grad_norm": NaN, "learning_rate": 6.83846344641883e-05, "loss": 0.0, "step": 370 },
+    { "epoch": 1.1611893583724568, "grad_norm": NaN, "learning_rate": 6.823028248612632e-05, "loss": 0.0, "step": 371 },
+    { "epoch": 1.164319248826291, "grad_norm": NaN, "learning_rate": 6.807572987948986e-05, "loss": 0.0, "step": 372 },
+    { "epoch": 1.1674491392801252, "grad_norm": NaN, "learning_rate": 6.792097834516697e-05, "loss": 0.0, "step": 373 },
+    { "epoch": 1.1705790297339593, "grad_norm": NaN, "learning_rate": 6.776602958623487e-05, "loss": 0.0, "step": 374 },
+    { "epoch": 1.1737089201877935, "grad_norm": NaN, "learning_rate": 6.761088530794136e-05, "loss": 0.0, "step": 375 },
+    { "epoch": 1.1768388106416277, "grad_norm": NaN, "learning_rate": 6.745554721768597e-05, "loss": 0.0, "step": 376 },
+    { "epoch": 1.1799687010954616, "grad_norm": NaN, "learning_rate": 6.730001702500113e-05, "loss": 0.0, "step": 377 },
+    { "epoch": 1.1830985915492958, "grad_norm": NaN, "learning_rate": 6.714429644153344e-05, "loss": 0.0, "step": 378 },
+    { "epoch": 1.18622848200313, "grad_norm": NaN, "learning_rate": 6.698838718102478e-05, "loss": 0.0, "step": 379 },
+    { "epoch": 1.189358372456964, "grad_norm": NaN, "learning_rate": 6.683229095929347e-05, "loss": 0.0, "step": 380 },
+    { "epoch": 1.192488262910798, "grad_norm": NaN, "learning_rate": 6.66760094942154e-05, "loss": 0.0, "step": 381 },
+    { "epoch": 1.1956181533646322, "grad_norm": NaN, "learning_rate": 6.651954450570508e-05, "loss": 0.0, "step": 382 },
+    { "epoch": 1.1987480438184663, "grad_norm": NaN, "learning_rate": 6.636289771569673e-05, "loss": 0.0, "step": 383 },
+    { "epoch": 1.2018779342723005, "grad_norm": NaN, "learning_rate": 6.620607084812538e-05, "loss": 0.0, "step": 384 },
+    { "epoch": 1.2050078247261347, "grad_norm": NaN, "learning_rate": 6.604906562890783e-05, "loss": 0.0, "step": 385 },
+    { "epoch": 1.2081377151799688, "grad_norm": NaN, "learning_rate": 6.589188378592369e-05, "loss": 0.0, "step": 386 },
+    { "epoch": 1.2112676056338028, "grad_norm": NaN, "learning_rate": 6.573452704899633e-05, "loss": 0.0, "step": 387 },
+    { "epoch": 1.214397496087637, "grad_norm": NaN, "learning_rate": 6.557699714987393e-05, "loss": 0.0, "step": 388 },
+    { "epoch": 1.217527386541471, "grad_norm": NaN, "learning_rate": 6.541929582221027e-05, "loss": 0.0, "step": 389 },
+    { "epoch": 1.2206572769953052, "grad_norm": NaN, "learning_rate": 6.526142480154584e-05, "loss": 0.0, "step": 390 },
+    { "epoch": 1.2237871674491392, "grad_norm": NaN, "learning_rate": 6.51033858252886e-05, "loss": 0.0, "step": 391 },
+    { "epoch": 1.2269170579029733, "grad_norm": NaN, "learning_rate": 6.494518063269486e-05, "loss": 0.0, "step": 392 },
+    { "epoch": 1.2300469483568075, "grad_norm": NaN, "learning_rate": 6.478681096485022e-05, "loss": 0.0, "step": 393 },
+    { "epoch": 1.2331768388106417, "grad_norm": NaN, "learning_rate": 6.46282785646504e-05, "loss": 0.0, "step": 394 },
+    { "epoch": 1.2363067292644758, "grad_norm": NaN, "learning_rate": 6.446958517678197e-05, "loss": 0.0, "step": 395 },
+    { "epoch": 1.2394366197183098, "grad_norm": NaN, "learning_rate": 6.431073254770323e-05, "loss": 0.0, "step": 396 },
+    { "epoch": 1.242566510172144, "grad_norm": NaN, "learning_rate": 6.415172242562497e-05, "loss": 0.0, "step": 397 },
+    { "epoch": 1.245696400625978, "grad_norm": NaN, "learning_rate": 6.399255656049125e-05, "loss": 0.0, "step": 398 },
+    { "epoch": 1.2488262910798122, "grad_norm": NaN, "learning_rate": 6.383323670396006e-05, "loss": 0.0, "step": 399 },
+    { "epoch": 1.2519561815336462, "grad_norm": NaN, "learning_rate": 6.367376460938416e-05, "loss": 0.0, "step": 400 },
+    { "epoch": 1.2550860719874803, "grad_norm": NaN, "learning_rate": 6.35141420317917e-05, "loss": 0.0, "step": 401 },
+    { "epoch": 1.2582159624413145, "grad_norm": NaN, "learning_rate": 6.335437072786692e-05, "loss": 0.0, "step": 402 },
+    { "epoch": 1.2613458528951487, "grad_norm": NaN, "learning_rate": 6.319445245593087e-05, "loss": 0.0, "step": 403 },
+    { "epoch": 1.2644757433489828, "grad_norm": NaN, "learning_rate": 6.303438897592197e-05, "loss": 0.0, "step": 404 },
+    { "epoch": 1.267605633802817, "grad_norm": NaN, "learning_rate": 6.28741820493767e-05, "loss": 0.0, "step": 405 },
+    { "epoch": 1.2707355242566511, "grad_norm": NaN, "learning_rate": 6.271383343941024e-05, "loss": 0.0, "step": 406 },
+    { "epoch": 1.273865414710485, "grad_norm": NaN, "learning_rate": 6.2553344910697e-05, "loss": 0.0, "step": 407 },
+    { "epoch": 1.2769953051643192, "grad_norm": NaN, "learning_rate": 6.23927182294512e-05, "loss": 0.0, "step": 408 },
+    { "epoch": 1.2801251956181534, "grad_norm": NaN, "learning_rate": 6.223195516340752e-05, "loss": 0.0, "step": 409 },
+    { "epoch": 1.2832550860719873, "grad_norm": NaN, "learning_rate": 6.207105748180158e-05, "loss": 0.0, "step": 410 },
+    { "epoch": 1.2863849765258215, "grad_norm": NaN, "learning_rate": 6.19100269553504e-05, "loss": 0.0, "step": 411 },
+    { "epoch": 1.2895148669796557, "grad_norm": NaN, "learning_rate": 6.174886535623307e-05, "loss": 0.0, "step": 412 },
+    { "epoch": 1.2926447574334898, "grad_norm": NaN, "learning_rate": 6.158757445807118e-05, "loss": 0.0, "step": 413 },
+    { "epoch": 1.295774647887324, "grad_norm": NaN, "learning_rate": 6.142615603590919e-05, "loss": 0.0, "step": 414 },
+    { "epoch": 1.2989045383411582, "grad_norm": NaN, "learning_rate": 6.126461186619508e-05, "loss": 0.0, "step": 415 },
+    { "epoch": 1.302034428794992, "grad_norm": NaN, "learning_rate": 6.110294372676065e-05, "loss": 0.0, "step": 416 },
+    { "epoch": 1.3051643192488263, "grad_norm": NaN, "learning_rate": 6.094115339680209e-05, "loss": 0.0, "step": 417 },
+    { "epoch": 1.3082942097026604, "grad_norm": NaN, "learning_rate": 6.077924265686027e-05, "loss": 0.0, "step": 418 },
+    { "epoch": 1.3114241001564946, "grad_norm": NaN, "learning_rate": 6.061721328880119e-05, "loss": 0.0, "step": 419 },
+    { "epoch": 1.3145539906103285, "grad_norm": NaN, "learning_rate": 6.0455067075796424e-05, "loss": 0.0, "step": 420 },
+    { "epoch": 1.3176838810641627, "grad_norm": NaN, "learning_rate": 6.029280580230342e-05, "loss": 0.0, "step": 421 },
+    { "epoch": 1.3208137715179968, "grad_norm": NaN, "learning_rate": 6.0130431254045904e-05, "loss": 0.0, "step": 422 },
+    { "epoch": 1.323943661971831, "grad_norm": NaN, "learning_rate": 5.996794521799425e-05, "loss": 0.0, "step": 423 },
+    { "epoch": 1.3270735524256652, "grad_norm": NaN, "learning_rate": 5.9805349482345706e-05, "loss": 0.0, "step": 424 },
+    { "epoch": 1.3302034428794993, "grad_norm": NaN, "learning_rate": 5.964264583650486e-05, "loss": 0.0, "step": 425 },
+    { "epoch": 1.3333333333333333, "grad_norm": NaN, "learning_rate": 5.947983607106385e-05, "loss": 0.0, "step": 426 },
+    { "epoch": 1.3364632237871674, "grad_norm": NaN, "learning_rate": 5.931692197778269e-05, "loss": 0.0, "step": 427 },
+    { "epoch": 1.3395931142410016, "grad_norm": NaN, "learning_rate": 5.9153905349569525e-05, "loss": 0.0, "step": 428 },
+    { "epoch": 1.3427230046948357, "grad_norm": NaN, "learning_rate": 5.8990787980460974e-05, "loss": 0.0, "step": 429 },
+    { "epoch": 1.3458528951486697, "grad_norm": NaN, "learning_rate": 5.882757166560226e-05, "loss": 0.0, "step": 430 },
+    { "epoch": 1.3489827856025038, "grad_norm": NaN, "learning_rate": 5.866425820122758e-05, "loss": 0.0, "step": 431 },
+    { "epoch": 1.352112676056338, "grad_norm": NaN, "learning_rate": 5.8500849384640285e-05, "loss": 0.0, "step": 432 },
+    { "epoch": 1.3552425665101722, "grad_norm": NaN, "learning_rate": 5.833734701419308e-05, "loss": 0.0, "step": 433 },
+    { "epoch": 1.3583724569640063, "grad_norm": NaN, "learning_rate": 5.817375288926825e-05, "loss": 0.0, "step": 434 },
+    { "epoch": 1.3615023474178405, "grad_norm": NaN, "learning_rate": 5.801006881025788e-05, "loss": 0.0, "step": 435 },
+    { "epoch": 1.3646322378716744, "grad_norm": NaN, "learning_rate": 5.784629657854399e-05, "loss": 0.0, "step": 436 },
+    { "epoch": 1.3677621283255086, "grad_norm": NaN, "learning_rate": 5.768243799647879e-05, "loss": 0.0, "step": 437 },
+    { "epoch": 1.3708920187793427, "grad_norm": NaN, "learning_rate": 5.7518494867364725e-05, "loss": 0.0, "step": 438 },
+    { "epoch": 1.374021909233177, "grad_norm": NaN, "learning_rate": 5.7354468995434794e-05, "loss": 0.0, "step": 439 },
+    { "epoch": 1.3771517996870108, "grad_norm": NaN, "learning_rate": 5.71903621858325e-05, "loss": 0.0, "step": 440 },
+    { "epoch": 1.380281690140845, "grad_norm": NaN, "learning_rate": 5.7026176244592155e-05, "loss": 0.0, "step": 441 },
+    { "epoch": 1.3834115805946792, "grad_norm": NaN, "learning_rate": 5.686191297861892e-05, "loss": 0.0, "step": 442 },
+    { "epoch": 1.3865414710485133, "grad_norm": NaN, "learning_rate": 5.66975741956689e-05, "loss": 0.0, "step": 443 },
+    { "epoch": 1.3896713615023475, "grad_norm": NaN, "learning_rate": 5.653316170432932e-05, "loss": 0.0, "step": 444 },
+    { "epoch": 1.3928012519561817, "grad_norm": NaN, "learning_rate": 5.636867731399856e-05, "loss": 0.0, "step": 445 },
+    { "epoch": 1.3959311424100156, "grad_norm": NaN, "learning_rate": 5.620412283486629e-05, "loss": 0.0, "step": 446 },
+    { "epoch": 1.3990610328638498, "grad_norm": NaN, "learning_rate": 5.603950007789349e-05, "loss": 0.0, "step": 447 },
+    { "epoch": 1.402190923317684, "grad_norm": NaN, "learning_rate": 5.5874810854792606e-05, "loss": 0.0, "step": 448 },
+    { "epoch": 1.405320813771518, "grad_norm": NaN, "learning_rate": 5.571005697800748e-05, "loss": 0.0, "step": 449 },
+    { "epoch": 1.408450704225352, "grad_norm": NaN, "learning_rate": 5.554524026069354e-05, "loss": 0.0, "step": 450 },
+    { "epoch": 1.4115805946791862, "grad_norm": NaN, "learning_rate": 5.5380362516697794e-05, "loss": 0.0, "step": 451 },
+    { "epoch": 1.4147104851330203, "grad_norm": NaN, "learning_rate": 5.521542556053885e-05, "loss": 0.0, "step": 452 },
+    { "epoch": 1.4178403755868545, "grad_norm": NaN, "learning_rate": 5.505043120738693e-05, "loss": 0.0, "step": 453 },
+    { "epoch": 1.4209702660406887, "grad_norm": NaN, "learning_rate": 5.488538127304399e-05, "loss": 0.0, "step": 454 },
+    { "epoch": 1.4241001564945228, "grad_norm": NaN, "learning_rate": 5.4720277573923595e-05, "loss": 0.0, "step": 455 },
+    { "epoch": 1.4272300469483568, "grad_norm": NaN, "learning_rate": 5.455512192703105e-05, "loss": 0.0, "step": 456 },
+    { "epoch": 1.430359937402191, "grad_norm": NaN, "learning_rate": 5.4389916149943354e-05, "loss": 0.0, "step": 457 },
+    { "epoch": 1.433489827856025, "grad_norm": NaN, "learning_rate": 5.422466206078919e-05, "loss": 0.0, "step": 458 },
+    { "epoch": 1.436619718309859, "grad_norm": NaN, "learning_rate": 5.405936147822892e-05, "loss": 0.0, "step": 459 },
+    { "epoch": 1.4397496087636932, "grad_norm": NaN, "learning_rate": 5.3894016221434605e-05, "loss": 0.0, "step": 460 },
+    { "epoch": 1.4428794992175273, "grad_norm": NaN, "learning_rate": 5.372862811006992e-05, "loss": 0.0, "step": 461 },
+    { "epoch": 1.4460093896713615, "grad_norm": NaN, "learning_rate": 5.35631989642702e-05, "loss": 0.0, "step": 462 },
+    { "epoch": 1.4491392801251957, "grad_norm": NaN, "learning_rate": 5.3397730604622344e-05, "loss": 0.0, "step": 463 },
+    { "epoch": 1.4522691705790298, "grad_norm": NaN, "learning_rate": 5.323222485214484e-05, "loss": 0.0, "step": 464 },
+    { "epoch": 1.455399061032864, "grad_norm": NaN, "learning_rate": 5.306668352826765e-05, "loss": 0.0, "step": 465 },
+    { "epoch": 1.458528951486698, "grad_norm": NaN, "learning_rate": 5.290110845481224e-05, "loss": 0.0, "step": 466 },
+    { "epoch": 1.461658841940532, "grad_norm": NaN, "learning_rate": 5.2735501453971516e-05, "loss": 0.0, "step": 467 },
+    { "epoch": 1.4647887323943662, "grad_norm": NaN, "learning_rate": 5.256986434828969e-05, "loss": 0.0, "step": 468 },
+    { "epoch": 1.4679186228482002, "grad_norm": NaN, "learning_rate": 5.240419896064235e-05, "loss": 0.0, "step": 469 },
+    { "epoch": 1.4710485133020343, "grad_norm": NaN, "learning_rate": 5.2238507114216285e-05, "loss": 0.0, "step": 470 },
+    { "epoch": 1.4741784037558685, "grad_norm": NaN, "learning_rate": 5.2072790632489497e-05, "loss": 0.0, "step": 471 },
+    { "epoch": 1.4773082942097027, "grad_norm": NaN, "learning_rate": 5.19070513392111e-05, "loss": 0.0, "step": 472 },
+    { "epoch": 1.4804381846635368, "grad_norm": NaN, "learning_rate": 5.174129105838127e-05, "loss": 0.0, "step": 473 },
+    { "epoch": 1.483568075117371, "grad_norm": NaN, "learning_rate": 5.1575511614231096e-05, "loss": 0.0, "step": 474 },
+    { "epoch": 1.486697965571205, "grad_norm": NaN, "learning_rate": 5.140971483120265e-05, "loss": 0.0, "step": 475 },
+    { "epoch": 1.489827856025039, "grad_norm": NaN, "learning_rate": 5.1243902533928754e-05, "loss": 0.0, "step": 476 },
+    { "epoch": 1.4929577464788732, "grad_norm": NaN, "learning_rate": 5.1078076547212994e-05, "loss": 0.0, "step": 477 },
+    { "epoch": 1.4960876369327074, "grad_norm": NaN, "learning_rate": 5.0912238696009616e-05, "loss": 0.0, "step": 478 },
+    { "epoch": 1.4992175273865413, "grad_norm": NaN, "learning_rate": 5.0746390805403445e-05, "loss": 0.0, "step": 479 },
+    { "epoch": 1.5023474178403755, "grad_norm": NaN, "learning_rate": 5.058053470058974e-05, "loss": 0.0, "step": 480 },
+    { "epoch": 1.5054773082942097, "grad_norm": NaN, "learning_rate": 5.041467220685424e-05, "loss": 0.0, "step": 481 },
+    { "epoch": 1.5086071987480438, "grad_norm": NaN, "learning_rate": 5.024880514955292e-05, "loss": 0.0, "step": 482 },
+    { "epoch": 1.511737089201878, "grad_norm": NaN, "learning_rate": 5.0082935354092044e-05, "loss": 0.0, "step": 483 },
+    { "epoch": 1.5148669796557122, "grad_norm": NaN, "learning_rate": 4.991706464590797e-05, "loss": 0.0, "step": 484 },
+    { "epoch": 1.5179968701095463, "grad_norm": NaN, "learning_rate": 4.975119485044709e-05, "loss": 0.0, "step": 485 },
+    { "epoch": 1.5211267605633803, "grad_norm": NaN, "learning_rate": 4.958532779314578e-05, "loss": 0.0, "step": 486 },
+    { "epoch": 1.5242566510172144, "grad_norm": NaN, "learning_rate": 4.9419465299410264e-05, "loss": 0.0, "step": 487 },
+    { "epoch": 1.5273865414710484, "grad_norm": NaN, "learning_rate": 4.9253609194596574e-05, "loss": 0.0, "step": 488 },
+    { "epoch": 1.5305164319248825, "grad_norm": NaN, "learning_rate": 4.908776130399039e-05, "loss": 0.0, "step": 489 },
+    { "epoch": 1.5336463223787167, "grad_norm": NaN, "learning_rate": 4.8921923452787025e-05, "loss": 0.0, "step": 490 },
+    { "epoch": 1.5367762128325508, "grad_norm": NaN, "learning_rate": 4.875609746607127e-05, "loss": 0.0, "step": 491 },
+    { "epoch": 1.539906103286385, "grad_norm": NaN, "learning_rate": 4.859028516879737e-05, "loss": 0.0, "step": 492 },
+    { "epoch": 1.5430359937402192, "grad_norm": NaN, "learning_rate": 4.842448838576891e-05, "loss": 0.0, "step": 493 },
+    { "epoch": 1.5461658841940533, "grad_norm": NaN, "learning_rate": 4.825870894161874e-05, "loss": 0.0, "step": 494 },
+    { "epoch": 1.5492957746478875, "grad_norm": NaN, "learning_rate": 4.8092948660788894e-05, "loss": 0.0, "step": 495 },
+    { "epoch": 1.5524256651017214, "grad_norm": NaN, "learning_rate": 4.79272093675105e-05, "loss": 0.0, "step": 496 },
+    { "epoch": 1.5555555555555556, "grad_norm": NaN, "learning_rate": 4.7761492885783734e-05, "loss": 0.0, "step": 497 },
+    { "epoch": 1.5586854460093895, "grad_norm": NaN, "learning_rate": 4.759580103935767e-05, "loss": 0.0, "step": 498 },
+    { "epoch": 1.5618153364632237, "grad_norm": NaN, "learning_rate": 4.7430135651710315e-05, "loss": 0.0, "step": 499 },
+    { "epoch": 1.5649452269170578, "grad_norm": NaN, "learning_rate": 4.7264498546028496e-05, "loss": 0.0, "step": 500 },
+    { "epoch": 1.568075117370892, "grad_norm": NaN, "learning_rate": 4.7098891545187755e-05, "loss": 0.0, "step": 501 },
+    { "epoch": 1.5712050078247262, "grad_norm": NaN, "learning_rate": 4.693331647173235e-05, "loss": 0.0, "step": 502 },
+    { "epoch": 1.5743348982785603, "grad_norm": NaN, "learning_rate": 4.6767775147855186e-05, "loss": 0.0, "step": 503 },
+    { "epoch": 1.5774647887323945, "grad_norm": NaN, "learning_rate": 4.660226939537767e-05, "loss": 0.0, "step": 504 },
+    { "epoch": 1.5805946791862286, "grad_norm": NaN, "learning_rate": 4.643680103572981e-05, "loss": 0.0, "step": 505 },
+    { "epoch": 1.5837245696400626, "grad_norm": NaN, "learning_rate": 4.627137188993009e-05, "loss": 0.0, "step": 506 },
+    { "epoch": 1.5868544600938967, "grad_norm": NaN, "learning_rate": 4.6105983778565406e-05, "loss": 0.0, "step": 507 },
+    { "epoch": 1.5899843505477307, "grad_norm": NaN, "learning_rate": 4.594063852177108e-05, "loss": 0.0, "step": 508 },
+    { "epoch": 1.5931142410015648, "grad_norm": NaN, "learning_rate": 4.577533793921083e-05, "loss": 0.0, "step": 509 },
+    { "epoch": 1.596244131455399, "grad_norm": NaN, "learning_rate": 4.5610083850056664e-05, "loss": 0.0, "step": 510 },
+    { "epoch": 1.5993740219092332, "grad_norm": NaN, "learning_rate": 4.5444878072968963e-05, "loss": 0.0, "step": 511 },
+    { "epoch": 1.6025039123630673, "grad_norm": NaN, "learning_rate": 4.527972242607642e-05, "loss": 0.0, "step": 512 },
+    { "epoch": 1.6056338028169015, "grad_norm": NaN, "learning_rate": 4.511461872695602e-05, "loss": 0.0, "step": 513 },
+    { "epoch": 1.6087636932707357, "grad_norm": NaN, "learning_rate": 4.4949568792613066e-05, "loss": 0.0, "step": 514 },
+    { "epoch": 1.6118935837245696, "grad_norm": NaN, "learning_rate": 4.478457443946117e-05, "loss": 0.0, "step": 515 },
+    { "epoch": 1.6150234741784038, "grad_norm": NaN, "learning_rate": 4.461963748330221e-05, "loss": 0.0, "step": 516 },
+    { "epoch": 1.618153364632238, "grad_norm": NaN, "learning_rate": 4.445475973930647e-05, "loss": 0.0, "step": 517 },
+    { "epoch": 1.6212832550860719, "grad_norm": NaN, "learning_rate": 4.428994302199254e-05, "loss": 0.0, "step": 518 },
+    { "epoch": 1.624413145539906, "grad_norm": NaN, "learning_rate": 4.412518914520741e-05, "loss": 0.0, "step": 519 },
+    { "epoch": 1.6275430359937402, "grad_norm": NaN, "learning_rate": 4.396049992210652e-05, "loss": 0.0, "step": 520 },
+    { "epoch": 1.6306729264475743, "grad_norm": NaN, "learning_rate": 4.379587716513372e-05, "loss": 0.0, "step": 521 },
+    { "epoch": 1.6338028169014085, "grad_norm": NaN, "learning_rate": 4.363132268600145e-05, "loss": 0.0, "step": 522 },
+    { "epoch": 1.6369327073552427, "grad_norm": NaN, "learning_rate": 4.34668382956707e-05, "loss": 0.0, "step": 523 },
+    { "epoch": 1.6400625978090768, "grad_norm": NaN, "learning_rate": 4.330242580433111e-05, "loss": 0.0, "step": 524 },
+    { "epoch": 1.6431924882629108, "grad_norm": NaN, "learning_rate": 4.313808702138109e-05, "loss": 0.0, "step": 525 },
+    { "epoch": 1.646322378716745, "grad_norm": NaN, "learning_rate": 4.297382375540784e-05, "loss": 0.0, "step": 526 },
+    { "epoch": 1.649452269170579, "grad_norm": NaN, "learning_rate": 4.2809637814167505e-05, "loss": 0.0, "step": 527 },
+    { "epoch": 1.652582159624413, "grad_norm": NaN, "learning_rate": 4.264553100456523e-05, "loss": 0.0, "step": 528 },
+    { "epoch": 1.6557120500782472, "grad_norm": NaN, "learning_rate": 4.248150513263528e-05, "loss": 0.0, "step": 529 },
+    { "epoch": 1.6588419405320813, "grad_norm": NaN, "learning_rate": 4.231756200352123e-05, "loss": 0.0, "step": 530 },
+    { "epoch": 1.6619718309859155, "grad_norm": NaN, "learning_rate": 4.215370342145601e-05, "loss": 0.0, "step": 531 },
+    { "epoch": 1.6651017214397497, "grad_norm": NaN, "learning_rate": 4.198993118974212e-05, "loss": 0.0, "step": 532 },
+    { "epoch": 1.6682316118935838, "grad_norm": NaN, "learning_rate": 4.1826247110731755e-05, "loss": 0.0, "step": 533 },
+    { "epoch": 1.671361502347418, "grad_norm": NaN, "learning_rate": 4.166265298580694e-05, "loss": 0.0, "step": 534 },
+    { "epoch": 1.674491392801252, "grad_norm": NaN, "learning_rate": 4.1499150615359726e-05, "loss": 0.0, "step": 535 },
+    { "epoch": 1.677621283255086, "grad_norm": NaN, "learning_rate": 4.133574179877243e-05, "loss": 0.0, "step": 536 },
+    { "epoch": 1.6807511737089202, "grad_norm": NaN, "learning_rate": 4.1172428334397754e-05, "loss": 0.0, "step": 537 },
+    { "epoch": 1.6838810641627542, "grad_norm": NaN, "learning_rate": 4.1009212019539044e-05, "loss": 0.0, "step": 538 },
+    { "epoch": 1.6870109546165883, "grad_norm": NaN, "learning_rate": 4.084609465043047e-05, "loss": 0.0, "step": 539 },
+    { "epoch": 1.6901408450704225, "grad_norm": NaN, "learning_rate": 4.068307802221734e-05, "loss": 0.0, "step": 540 },
+    { "epoch": 1.6932707355242567, "grad_norm": NaN, "learning_rate": 4.052016392893616e-05, "loss": 0.0, "step": 541 },
+    { "epoch": 1.6964006259780908, "grad_norm": NaN, "learning_rate": 4.035735416349515e-05, "loss": 0.0, "step": 542 },
+    { "epoch": 1.699530516431925, "grad_norm": NaN, "learning_rate": 4.0194650517654306e-05, "loss": 0.0, "step": 543 },
+    { "epoch": 1.7026604068857591, "grad_norm": NaN, "learning_rate": 4.003205478200576e-05, "loss": 0.0, "step": 544 },
+    { "epoch": 1.705790297339593, "grad_norm": NaN, "learning_rate": 3.986956874595409e-05, "loss": 0.0, "step": 545 },
+    { "epoch": 1.7089201877934272, "grad_norm": NaN, "learning_rate": 3.9707194197696584e-05, "loss": 0.0, "step": 546 },
+    { "epoch": 1.7120500782472612, "grad_norm": NaN, "learning_rate": 3.9544932924203594e-05, "loss": 0.0, "step": 547 },
+    { "epoch": 1.7151799687010953, "grad_norm": NaN, "learning_rate": 3.9382786711198814e-05, "loss": 0.0, "step": 548 },
+    { "epoch": 1.7183098591549295, "grad_norm": NaN, "learning_rate": 3.922075734313974e-05, "loss": 0.0, "step": 549 },
+    { "epoch": 1.7214397496087637, "grad_norm": NaN, "learning_rate": 3.905884660319792e-05, "loss": 0.0, "step": 550 },
+    { "epoch": 1.7245696400625978, "grad_norm": NaN, "learning_rate": 3.8897056273239354e-05, "loss": 0.0, "step": 551 },
+    { "epoch": 1.727699530516432, "grad_norm": NaN, "learning_rate": 3.8735388133804936e-05, "loss": 0.0, "step": 552 },
+    { "epoch": 1.7308294209702662, "grad_norm": NaN, "learning_rate": 3.857384396409083e-05, "loss": 0.0, "step": 553 },
+    { "epoch": 1.7339593114241003, "grad_norm": NaN, "learning_rate": 3.841242554192884e-05, "loss": 0.0, "step": 554 },
+    { "epoch": 1.7370892018779343, "grad_norm": NaN, "learning_rate": 3.825113464376693e-05, "loss": 0.0, "step": 555 },
+    { "epoch": 1.7402190923317684, "grad_norm": NaN, "learning_rate": 3.808997304464961e-05, "loss": 0.0, "step": 556 },
+    { "epoch": 1.7433489827856024, "grad_norm": NaN, "learning_rate": 3.7928942518198444e-05, "loss": 0.0, "step": 557 },
+    { "epoch": 1.7464788732394365, "grad_norm": NaN, "learning_rate": 3.7768044836592475e-05, "loss": 0.0, "step": 558 },
+    { "epoch": 1.7496087636932707, "grad_norm": NaN, "learning_rate": 3.7607281770548816e-05, "loss": 0.0, "step": 559 },
+    { "epoch": 1.7527386541471048, "grad_norm": NaN, "learning_rate": 3.744665508930303e-05, "loss": 0.0, "step": 560 },
+    { "epoch": 1.755868544600939, "grad_norm": NaN, "learning_rate": 3.728616656058977e-05, "loss": 0.0, "step": 561 },
+    { "epoch": 1.7589984350547732, "grad_norm": NaN, "learning_rate": 3.712581795062331e-05, "loss": 0.0, "step": 562 },
+    { "epoch": 1.7621283255086073, "grad_norm": NaN, "learning_rate": 3.696561102407805e-05, "loss": 0.0, "step": 563 },
+    { "epoch": 1.7652582159624415, "grad_norm": NaN, "learning_rate": 3.6805547544069144e-05, "loss": 0.0, "step": 564 },
+    { "epoch": 1.7683881064162754, "grad_norm": NaN, "learning_rate": 3.664562927213308e-05, "loss": 0.0, "step": 565 },
+    { "epoch": 1.7715179968701096, "grad_norm": NaN, "learning_rate": 3.648585796820833e-05, "loss": 0.0, "step": 566 },
+    { "epoch": 1.7746478873239435, "grad_norm": NaN, "learning_rate": 3.632623539061585e-05, "loss": 0.0, "step": 567 },
+    { "epoch": 1.7777777777777777, "grad_norm": NaN, "learning_rate": 3.616676329603995e-05, "loss": 0.0, "step": 568 },
+    { "epoch": 1.7809076682316118, "grad_norm": NaN, "learning_rate": 3.600744343950876e-05, "loss": 0.0, "step": 569 },
+    { "epoch": 1.784037558685446, "grad_norm": NaN, "learning_rate": 3.5848277574375024e-05, "loss": 0.0, "step": 570 },
+    { "epoch": 1.7871674491392802, "grad_norm": NaN, "learning_rate": 3.568926745229677e-05, "loss": 0.0, "step": 571 },
+    { "epoch": 1.7902973395931143, "grad_norm": NaN, "learning_rate": 3.553041482321805e-05, "loss": 0.0, "step": 572 },
+    { "epoch": 1.7934272300469485, "grad_norm": NaN, "learning_rate": 3.5371721435349615e-05, "loss": 0.0, "step": 573 },
+    {
+
"epoch": 1.7965571205007824,
|
4032 |
+
"grad_norm": NaN,
|
4033 |
+
"learning_rate": 3.521318903514979e-05,
|
4034 |
+
"loss": 0.0,
|
4035 |
+
"step": 574
|
4036 |
+
},
|
4037 |
+
{
|
4038 |
+
"epoch": 1.7996870109546166,
|
4039 |
+
"grad_norm": NaN,
|
4040 |
+
"learning_rate": 3.505481936730516e-05,
|
4041 |
+
"loss": 0.0,
|
4042 |
+
"step": 575
|
4043 |
+
},
|
4044 |
+
{
|
4045 |
+
"epoch": 1.8028169014084507,
|
4046 |
+
"grad_norm": NaN,
|
4047 |
+
"learning_rate": 3.489661417471142e-05,
|
4048 |
+
"loss": 0.0,
|
4049 |
+
"step": 576
|
4050 |
+
},
|
4051 |
+
{
|
4052 |
+
"epoch": 1.8059467918622847,
|
4053 |
+
"grad_norm": NaN,
|
4054 |
+
"learning_rate": 3.473857519845415e-05,
|
4055 |
+
"loss": 0.0,
|
4056 |
+
"step": 577
|
4057 |
+
},
|
4058 |
+
{
|
4059 |
+
"epoch": 1.8090766823161188,
|
4060 |
+
"grad_norm": NaN,
|
4061 |
+
"learning_rate": 3.458070417778974e-05,
|
4062 |
+
"loss": 0.0,
|
4063 |
+
"step": 578
|
4064 |
+
},
|
4065 |
+
{
|
4066 |
+
"epoch": 1.812206572769953,
|
4067 |
+
"grad_norm": NaN,
|
4068 |
+
"learning_rate": 3.442300285012609e-05,
|
4069 |
+
"loss": 0.0,
|
4070 |
+
"step": 579
|
4071 |
+
},
|
4072 |
+
{
|
4073 |
+
"epoch": 1.8153364632237872,
|
4074 |
+
"grad_norm": NaN,
|
4075 |
+
"learning_rate": 3.4265472951003676e-05,
|
4076 |
+
"loss": 0.0,
|
4077 |
+
"step": 580
|
4078 |
+
},
|
4079 |
+
{
|
4080 |
+
"epoch": 1.8184663536776213,
|
4081 |
+
"grad_norm": NaN,
|
4082 |
+
"learning_rate": 3.410811621407633e-05,
|
4083 |
+
"loss": 0.0,
|
4084 |
+
"step": 581
|
4085 |
+
},
|
4086 |
+
{
|
4087 |
+
"epoch": 1.8215962441314555,
|
4088 |
+
"grad_norm": NaN,
|
4089 |
+
"learning_rate": 3.395093437109219e-05,
|
4090 |
+
"loss": 0.0,
|
4091 |
+
"step": 582
|
4092 |
+
},
|
4093 |
+
{
|
4094 |
+
"epoch": 1.8247261345852896,
|
4095 |
+
"grad_norm": NaN,
|
4096 |
+
"learning_rate": 3.3793929151874635e-05,
|
4097 |
+
"loss": 0.0,
|
4098 |
+
"step": 583
|
4099 |
+
},
|
4100 |
+
{
|
4101 |
+
"epoch": 1.8278560250391236,
|
4102 |
+
"grad_norm": NaN,
|
4103 |
+
"learning_rate": 3.363710228430329e-05,
|
4104 |
+
"loss": 0.0,
|
4105 |
+
"step": 584
|
4106 |
+
},
|
4107 |
+
{
|
4108 |
+
"epoch": 1.8309859154929577,
|
4109 |
+
"grad_norm": NaN,
|
4110 |
+
"learning_rate": 3.348045549429495e-05,
|
4111 |
+
"loss": 0.0,
|
4112 |
+
"step": 585
|
4113 |
+
},
|
4114 |
+
{
|
4115 |
+
"epoch": 1.834115805946792,
|
4116 |
+
"grad_norm": NaN,
|
4117 |
+
"learning_rate": 3.332399050578462e-05,
|
4118 |
+
"loss": 0.0,
|
4119 |
+
"step": 586
|
4120 |
+
},
|
4121 |
+
{
|
4122 |
+
"epoch": 1.8372456964006258,
|
4123 |
+
"grad_norm": NaN,
|
4124 |
+
"learning_rate": 3.3167709040706535e-05,
|
4125 |
+
"loss": 0.0,
|
4126 |
+
"step": 587
|
4127 |
+
},
|
4128 |
+
{
|
4129 |
+
"epoch": 1.84037558685446,
|
4130 |
+
"grad_norm": NaN,
|
4131 |
+
"learning_rate": 3.301161281897523e-05,
|
4132 |
+
"loss": 0.0,
|
4133 |
+
"step": 588
|
4134 |
+
},
|
4135 |
+
{
|
4136 |
+
"epoch": 1.8435054773082942,
|
4137 |
+
"grad_norm": NaN,
|
4138 |
+
"learning_rate": 3.285570355846657e-05,
|
4139 |
+
"loss": 0.0,
|
4140 |
+
"step": 589
|
4141 |
+
},
|
4142 |
+
{
|
4143 |
+
"epoch": 1.8466353677621283,
|
4144 |
+
"grad_norm": NaN,
|
4145 |
+
"learning_rate": 3.269998297499887e-05,
|
4146 |
+
"loss": 0.0,
|
4147 |
+
"step": 590
|
4148 |
+
},
|
4149 |
+
{
|
4150 |
+
"epoch": 1.8497652582159625,
|
4151 |
+
"grad_norm": NaN,
|
4152 |
+
"learning_rate": 3.254445278231405e-05,
|
4153 |
+
"loss": 0.0,
|
4154 |
+
"step": 591
|
4155 |
+
},
|
4156 |
+
{
|
4157 |
+
"epoch": 1.8528951486697967,
|
4158 |
+
"grad_norm": NaN,
|
4159 |
+
"learning_rate": 3.238911469205865e-05,
|
4160 |
+
"loss": 0.0,
|
4161 |
+
"step": 592
|
4162 |
+
},
|
4163 |
+
{
|
4164 |
+
"epoch": 1.8560250391236308,
|
4165 |
+
"grad_norm": NaN,
|
4166 |
+
"learning_rate": 3.223397041376515e-05,
|
4167 |
+
"loss": 0.0,
|
4168 |
+
"step": 593
|
4169 |
+
},
|
4170 |
+
{
|
4171 |
+
"epoch": 1.8591549295774648,
|
4172 |
+
"grad_norm": NaN,
|
4173 |
+
"learning_rate": 3.207902165483305e-05,
|
4174 |
+
"loss": 0.0,
|
4175 |
+
"step": 594
|
4176 |
+
},
|
4177 |
+
{
|
4178 |
+
"epoch": 1.862284820031299,
|
4179 |
+
"grad_norm": NaN,
|
4180 |
+
"learning_rate": 3.1924270120510135e-05,
|
4181 |
+
"loss": 0.0,
|
4182 |
+
"step": 595
|
4183 |
+
},
|
4184 |
+
{
|
4185 |
+
"epoch": 1.8654147104851329,
|
4186 |
+
"grad_norm": NaN,
|
4187 |
+
"learning_rate": 3.176971751387368e-05,
|
4188 |
+
"loss": 0.0,
|
4189 |
+
"step": 596
|
4190 |
+
},
|
4191 |
+
{
|
4192 |
+
"epoch": 1.868544600938967,
|
4193 |
+
"grad_norm": NaN,
|
4194 |
+
"learning_rate": 3.161536553581172e-05,
|
4195 |
+
"loss": 0.0,
|
4196 |
+
"step": 597
|
4197 |
+
},
|
4198 |
+
{
|
4199 |
+
"epoch": 1.8716744913928012,
|
4200 |
+
"grad_norm": NaN,
|
4201 |
+
"learning_rate": 3.1461215885004266e-05,
|
4202 |
+
"loss": 0.0,
|
4203 |
+
"step": 598
|
4204 |
+
},
|
4205 |
+
{
|
4206 |
+
"epoch": 1.8748043818466353,
|
4207 |
+
"grad_norm": NaN,
|
4208 |
+
"learning_rate": 3.1307270257904763e-05,
|
4209 |
+
"loss": 0.0,
|
4210 |
+
"step": 599
|
4211 |
+
},
|
4212 |
+
{
|
4213 |
+
"epoch": 1.8779342723004695,
|
4214 |
+
"grad_norm": NaN,
|
4215 |
+
"learning_rate": 3.1153530348721257e-05,
|
4216 |
+
"loss": 0.0,
|
4217 |
+
"step": 600
|
4218 |
+
},
|
4219 |
+
{
|
4220 |
+
"epoch": 1.8810641627543037,
|
4221 |
+
"grad_norm": NaN,
|
4222 |
+
"learning_rate": 3.099999784939784e-05,
|
4223 |
+
"loss": 0.0,
|
4224 |
+
"step": 601
|
4225 |
+
},
|
4226 |
+
{
|
4227 |
+
"epoch": 1.8841940532081378,
|
4228 |
+
"grad_norm": NaN,
|
4229 |
+
"learning_rate": 3.084667444959605e-05,
|
4230 |
+
"loss": 0.0,
|
4231 |
+
"step": 602
|
4232 |
+
},
|
4233 |
+
{
|
4234 |
+
"epoch": 1.887323943661972,
|
4235 |
+
"grad_norm": NaN,
|
4236 |
+
"learning_rate": 3.0693561836676174e-05,
|
4237 |
+
"loss": 0.0,
|
4238 |
+
"step": 603
|
4239 |
+
},
|
4240 |
+
{
|
4241 |
+
"epoch": 1.890453834115806,
|
4242 |
+
"grad_norm": NaN,
|
4243 |
+
"learning_rate": 3.0540661695678755e-05,
|
4244 |
+
"loss": 0.0,
|
4245 |
+
"step": 604
|
4246 |
+
},
|
4247 |
+
{
|
4248 |
+
"epoch": 1.89358372456964,
|
4249 |
+
"grad_norm": NaN,
|
4250 |
+
"learning_rate": 3.0387975709306043e-05,
|
4251 |
+
"loss": 0.0,
|
4252 |
+
"step": 605
|
4253 |
+
},
|
4254 |
+
{
|
4255 |
+
"epoch": 1.896713615023474,
|
4256 |
+
"grad_norm": NaN,
|
4257 |
+
"learning_rate": 3.0235505557903478e-05,
|
4258 |
+
"loss": 0.0,
|
4259 |
+
"step": 606
|
4260 |
+
},
|
4261 |
+
{
|
4262 |
+
"epoch": 1.8998435054773082,
|
4263 |
+
"grad_norm": NaN,
|
4264 |
+
"learning_rate": 3.0083252919441158e-05,
|
4265 |
+
"loss": 0.0,
|
4266 |
+
"step": 607
|
4267 |
+
},
|
4268 |
+
{
|
4269 |
+
"epoch": 1.9029733959311423,
|
4270 |
+
"grad_norm": NaN,
|
4271 |
+
"learning_rate": 2.993121946949542e-05,
|
4272 |
+
"loss": 0.0,
|
4273 |
+
"step": 608
|
4274 |
+
},
|
4275 |
+
{
|
4276 |
+
"epoch": 1.9061032863849765,
|
4277 |
+
"grad_norm": NaN,
|
4278 |
+
"learning_rate": 2.9779406881230383e-05,
|
4279 |
+
"loss": 0.0,
|
4280 |
+
"step": 609
|
4281 |
+
},
|
4282 |
+
{
|
4283 |
+
"epoch": 1.9092331768388107,
|
4284 |
+
"grad_norm": NaN,
|
4285 |
+
"learning_rate": 2.9627816825379497e-05,
|
4286 |
+
"loss": 0.0,
|
4287 |
+
"step": 610
|
4288 |
+
},
|
4289 |
+
{
|
4290 |
+
"epoch": 1.9123630672926448,
|
4291 |
+
"grad_norm": NaN,
|
4292 |
+
"learning_rate": 2.9476450970227233e-05,
|
4293 |
+
"loss": 0.0,
|
4294 |
+
"step": 611
|
4295 |
+
},
|
4296 |
+
{
|
4297 |
+
"epoch": 1.915492957746479,
|
4298 |
+
"grad_norm": NaN,
|
4299 |
+
"learning_rate": 2.9325310981590642e-05,
|
4300 |
+
"loss": 0.0,
|
4301 |
+
"step": 612
|
4302 |
+
},
|
4303 |
+
{
|
4304 |
+
"epoch": 1.9186228482003131,
|
4305 |
+
"grad_norm": NaN,
|
4306 |
+
"learning_rate": 2.917439852280108e-05,
|
4307 |
+
"loss": 0.0,
|
4308 |
+
"step": 613
|
4309 |
+
},
|
4310 |
+
{
|
4311 |
+
"epoch": 1.921752738654147,
|
4312 |
+
"grad_norm": NaN,
|
4313 |
+
"learning_rate": 2.9023715254685903e-05,
|
4314 |
+
"loss": 0.0,
|
4315 |
+
"step": 614
|
4316 |
+
},
|
4317 |
+
{
|
4318 |
+
"epoch": 1.9248826291079812,
|
4319 |
+
"grad_norm": NaN,
|
4320 |
+
"learning_rate": 2.8873262835550118e-05,
|
4321 |
+
"loss": 0.0,
|
4322 |
+
"step": 615
|
4323 |
+
},
|
4324 |
+
{
|
4325 |
+
"epoch": 1.9280125195618152,
|
4326 |
+
"grad_norm": NaN,
|
4327 |
+
"learning_rate": 2.872304292115828e-05,
|
4328 |
+
"loss": 0.0,
|
4329 |
+
"step": 616
|
4330 |
+
},
|
4331 |
+
{
|
4332 |
+
"epoch": 1.9311424100156493,
|
4333 |
+
"grad_norm": NaN,
|
4334 |
+
"learning_rate": 2.857305716471601e-05,
|
4335 |
+
"loss": 0.0,
|
4336 |
+
"step": 617
|
4337 |
+
},
|
4338 |
+
{
|
4339 |
+
"epoch": 1.9342723004694835,
|
4340 |
+
"grad_norm": NaN,
|
4341 |
+
"learning_rate": 2.8423307216852142e-05,
|
4342 |
+
"loss": 0.0,
|
4343 |
+
"step": 618
|
4344 |
+
},
|
4345 |
+
{
|
4346 |
+
"epoch": 1.9374021909233177,
|
4347 |
+
"grad_norm": NaN,
|
4348 |
+
"learning_rate": 2.8273794725600255e-05,
|
4349 |
+
"loss": 0.0,
|
4350 |
+
"step": 619
|
4351 |
+
},
|
4352 |
+
{
|
4353 |
+
"epoch": 1.9405320813771518,
|
4354 |
+
"grad_norm": NaN,
|
4355 |
+
"learning_rate": 2.812452133638075e-05,
|
4356 |
+
"loss": 0.0,
|
4357 |
+
"step": 620
|
4358 |
+
},
|
4359 |
+
{
|
4360 |
+
"epoch": 1.943661971830986,
|
4361 |
+
"grad_norm": NaN,
|
4362 |
+
"learning_rate": 2.7975488691982578e-05,
|
4363 |
+
"loss": 0.0,
|
4364 |
+
"step": 621
|
4365 |
+
},
|
4366 |
+
{
|
4367 |
+
"epoch": 1.9467918622848202,
|
4368 |
+
"grad_norm": NaN,
|
4369 |
+
"learning_rate": 2.7826698432545317e-05,
|
4370 |
+
"loss": 0.0,
|
4371 |
+
"step": 622
|
4372 |
+
},
|
4373 |
+
{
|
4374 |
+
"epoch": 1.9499217527386543,
|
4375 |
+
"grad_norm": NaN,
|
4376 |
+
"learning_rate": 2.767815219554094e-05,
|
4377 |
+
"loss": 0.0,
|
4378 |
+
"step": 623
|
4379 |
+
},
|
4380 |
+
{
|
4381 |
+
"epoch": 1.9530516431924883,
|
4382 |
+
"grad_norm": NaN,
|
4383 |
+
"learning_rate": 2.7529851615755993e-05,
|
4384 |
+
"loss": 0.0,
|
4385 |
+
"step": 624
|
4386 |
+
},
|
4387 |
+
{
|
4388 |
+
"epoch": 1.9561815336463224,
|
4389 |
+
"grad_norm": NaN,
|
4390 |
+
"learning_rate": 2.738179832527343e-05,
|
4391 |
+
"loss": 0.0,
|
4392 |
+
"step": 625
|
4393 |
+
},
|
4394 |
+
{
|
4395 |
+
"epoch": 1.9593114241001564,
|
4396 |
+
"grad_norm": NaN,
|
4397 |
+
"learning_rate": 2.7233993953454795e-05,
|
4398 |
+
"loss": 0.0,
|
4399 |
+
"step": 626
|
4400 |
+
},
|
4401 |
+
{
|
4402 |
+
"epoch": 1.9624413145539905,
|
4403 |
+
"grad_norm": NaN,
|
4404 |
+
"learning_rate": 2.7086440126922163e-05,
|
4405 |
+
"loss": 0.0,
|
4406 |
+
"step": 627
|
4407 |
+
},
|
4408 |
+
{
|
4409 |
+
"epoch": 1.9655712050078247,
|
4410 |
+
"grad_norm": NaN,
|
4411 |
+
"learning_rate": 2.693913846954036e-05,
|
4412 |
+
"loss": 0.0,
|
4413 |
+
"step": 628
|
4414 |
+
},
|
4415 |
+
{
|
4416 |
+
"epoch": 1.9687010954616588,
|
4417 |
+
"grad_norm": NaN,
|
4418 |
+
"learning_rate": 2.6792090602398966e-05,
|
4419 |
+
"loss": 0.0,
|
4420 |
+
"step": 629
|
4421 |
+
},
|
4422 |
+
{
|
4423 |
+
"epoch": 1.971830985915493,
|
4424 |
+
"grad_norm": NaN,
|
4425 |
+
"learning_rate": 2.664529814379457e-05,
|
4426 |
+
"loss": 0.0,
|
4427 |
+
"step": 630
|
4428 |
+
},
|
4429 |
+
{
|
4430 |
+
"epoch": 1.9749608763693272,
|
4431 |
+
"grad_norm": NaN,
|
4432 |
+
"learning_rate": 2.6498762709212956e-05,
|
4433 |
+
"loss": 0.0,
|
4434 |
+
"step": 631
|
4435 |
+
},
|
4436 |
+
{
|
4437 |
+
"epoch": 1.9780907668231613,
|
4438 |
+
"grad_norm": NaN,
|
4439 |
+
"learning_rate": 2.6352485911311253e-05,
|
4440 |
+
"loss": 0.0,
|
4441 |
+
"step": 632
|
4442 |
+
},
|
4443 |
+
{
|
4444 |
+
"epoch": 1.9812206572769953,
|
4445 |
+
"grad_norm": NaN,
|
4446 |
+
"learning_rate": 2.6206469359900236e-05,
|
4447 |
+
"loss": 0.0,
|
4448 |
+
"step": 633
|
4449 |
+
},
|
4450 |
+
{
|
4451 |
+
"epoch": 1.9843505477308294,
|
4452 |
+
"grad_norm": NaN,
|
4453 |
+
"learning_rate": 2.6060714661926654e-05,
|
4454 |
+
"loss": 0.0,
|
4455 |
+
"step": 634
|
4456 |
+
},
|
4457 |
+
{
|
4458 |
+
"epoch": 1.9874804381846636,
|
4459 |
+
"grad_norm": NaN,
|
4460 |
+
"learning_rate": 2.5915223421455448e-05,
|
4461 |
+
"loss": 0.0,
|
4462 |
+
"step": 635
|
4463 |
+
},
|
4464 |
+
{
|
4465 |
+
"epoch": 1.9906103286384975,
|
4466 |
+
"grad_norm": NaN,
|
4467 |
+
"learning_rate": 2.5769997239652137e-05,
|
4468 |
+
"loss": 0.0,
|
4469 |
+
"step": 636
|
4470 |
+
},
|
4471 |
+
{
|
4472 |
+
"epoch": 1.9937402190923317,
|
4473 |
+
"grad_norm": NaN,
|
4474 |
+
"learning_rate": 2.5625037714765277e-05,
|
4475 |
+
"loss": 0.0,
|
4476 |
+
"step": 637
|
4477 |
+
},
|
4478 |
+
{
|
4479 |
+
"epoch": 1.9968701095461658,
|
4480 |
+
"grad_norm": NaN,
|
4481 |
+
"learning_rate": 2.5480346442108714e-05,
|
4482 |
+
"loss": 0.0,
|
4483 |
+
"step": 638
|
4484 |
+
},
|
4485 |
+
{
|
4486 |
+
"epoch": 2.0,
|
4487 |
+
"grad_norm": NaN,
|
4488 |
+
"learning_rate": 2.5335925014044193e-05,
|
4489 |
+
"loss": 0.0,
|
4490 |
+
"step": 639
|
4491 |
+
},
|
4492 |
+
{
|
4493 |
+
"epoch": 2.0,
|
4494 |
+
"eval_loss": NaN,
|
4495 |
+
"eval_runtime": 15.1515,
|
4496 |
+
"eval_samples_per_second": 35.574,
|
4497 |
+
"eval_steps_per_second": 8.91,
|
4498 |
+
"step": 639
|
4499 |
}
|
4500 |
],
|
4501 |
"logging_steps": 1,
|
|
|
4515 |
"attributes": {}
|
4516 |
}
|
4517 |
},
|
4518 |
+
"total_flos": 1.6924437727779226e+17,
|
4519 |
"train_batch_size": 4,
|
4520 |
"trial_name": null,
|
4521 |
"trial_params": null
|