Training in progress, epoch 2, checkpoint

- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +2230 -4
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7ca893e837a53b374ed2f99ea17041351244e9cb4f575e9134fd7a17e1c7cb38
 size 295765866
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:313a10245f808f3151834d36dac7710e5dc491246ac6975d4f864e76c8039a41
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4723bc5d84210296bc0ea1781885cc3d6f5fc81478f5a4267b57964978ad6fec
 size 1064
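The three files above are Git LFS pointers, so only the sha256 oid and byte size change in this commit. A minimal sketch for checking a local copy against the new pointers, assuming the repository was cloned with `git lfs pull` so the real binaries are on disk (paths and expected digests are taken from the pointer diffs above):

# Sketch: verify pulled LFS binaries against the new pointer oids in this commit.
import hashlib
from pathlib import Path

EXPECTED = {
    "last-checkpoint/optimizer.pt": "7ca893e837a53b374ed2f99ea17041351244e9cb4f575e9134fd7a17e1c7cb38",
    "last-checkpoint/rng_state.pth": "313a10245f808f3151834d36dac7710e5dc491246ac6975d4f864e76c8039a41",
    "last-checkpoint/scheduler.pt": "4723bc5d84210296bc0ea1781885cc3d6f5fc81478f5a4267b57964978ad6fec",
}

for path, oid in EXPECTED.items():
    digest = hashlib.sha256(Path(path).read_bytes()).hexdigest()
    print(path, "OK" if digest == oid else f"MISMATCH ({digest})")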
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
|
|
1 |
{
|
2 |
"best_metric": null,
|
3 |
"best_model_checkpoint": null,
|
4 |
-
"epoch": 2.
|
5 |
"eval_steps": 500,
|
6 |
-
"global_step":
|
7 |
"is_hyper_param_search": false,
|
8 |
"is_local_process_zero": true,
|
9 |
"is_world_process_zero": true,
|
@@ -4496,6 +4496,2232 @@
|
|
4496 |
"eval_samples_per_second": 35.574,
|
4497 |
"eval_steps_per_second": 8.91,
|
4498 |
"step": 639
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
4499 |
}
|
4500 |
],
|
4501 |
"logging_steps": 1,
|
@@ -4510,12 +6736,12 @@
|
|
4510 |
"should_evaluate": false,
|
4511 |
"should_log": false,
|
4512 |
"should_save": true,
|
4513 |
-
"should_training_stop":
|
4514 |
},
|
4515 |
"attributes": {}
|
4516 |
}
|
4517 |
},
|
4518 |
-
"total_flos":
|
4519 |
"train_batch_size": 4,
|
4520 |
"trial_name": null,
|
4521 |
"trial_params": null
|
|
|
1 |
{
|
2 |
"best_metric": null,
|
3 |
"best_model_checkpoint": null,
|
4 |
+
"epoch": 2.995305164319249,
|
5 |
"eval_steps": 500,
|
6 |
+
"global_step": 957,
|
7 |
"is_hyper_param_search": false,
|
8 |
"is_local_process_zero": true,
|
9 |
"is_world_process_zero": true,
|
|
|
4496 |
"eval_samples_per_second": 35.574,
|
4497 |
"eval_steps_per_second": 8.91,
|
4498 |
"step": 639
|
4499 |
+
},
|
4500 |
+
{
|
4501 |
+
"epoch": 2.003129890453834,
|
4502 |
+
"grad_norm": NaN,
|
4503 |
+
"learning_rate": 2.5191775019963686e-05,
|
4504 |
+
"loss": 0.0,
|
4505 |
+
"step": 640
|
4506 |
+
},
|
4507 |
+
{
|
4508 |
+
"epoch": 2.0062597809076683,
|
4509 |
+
"grad_norm": NaN,
|
4510 |
+
"learning_rate": 2.5047898046272057e-05,
|
4511 |
+
"loss": 0.0,
|
4512 |
+
"step": 641
|
4513 |
+
},
|
4514 |
+
{
|
4515 |
+
"epoch": 2.0093896713615025,
|
4516 |
+
"grad_norm": NaN,
|
4517 |
+
"learning_rate": 2.490429567636939e-05,
|
4518 |
+
"loss": 0.0,
|
4519 |
+
"step": 642
|
4520 |
+
},
|
4521 |
+
{
|
4522 |
+
"epoch": 2.0125195618153366,
|
4523 |
+
"grad_norm": NaN,
|
4524 |
+
"learning_rate": 2.4760969490633807e-05,
|
4525 |
+
"loss": 0.0,
|
4526 |
+
"step": 643
|
4527 |
+
},
|
4528 |
+
{
|
4529 |
+
"epoch": 2.0156494522691704,
|
4530 |
+
"grad_norm": NaN,
|
4531 |
+
"learning_rate": 2.4617921066403888e-05,
|
4532 |
+
"loss": 0.0,
|
4533 |
+
"step": 644
|
4534 |
+
},
|
4535 |
+
{
|
4536 |
+
"epoch": 2.0187793427230045,
|
4537 |
+
"grad_norm": NaN,
|
4538 |
+
"learning_rate": 2.4475151977961437e-05,
|
4539 |
+
"loss": 0.0,
|
4540 |
+
"step": 645
|
4541 |
+
},
|
4542 |
+
{
|
4543 |
+
"epoch": 2.0219092331768387,
|
4544 |
+
"grad_norm": NaN,
|
4545 |
+
"learning_rate": 2.4332663796514032e-05,
|
4546 |
+
"loss": 0.0,
|
4547 |
+
"step": 646
|
4548 |
+
},
|
4549 |
+
{
|
4550 |
+
"epoch": 2.025039123630673,
|
4551 |
+
"grad_norm": NaN,
|
4552 |
+
"learning_rate": 2.41904580901779e-05,
|
4553 |
+
"loss": 0.0,
|
4554 |
+
"step": 647
|
4555 |
+
},
|
4556 |
+
{
|
4557 |
+
"epoch": 2.028169014084507,
|
4558 |
+
"grad_norm": NaN,
|
4559 |
+
"learning_rate": 2.4048536423960428e-05,
|
4560 |
+
"loss": 0.0,
|
4561 |
+
"step": 648
|
4562 |
+
},
|
4563 |
+
{
|
4564 |
+
"epoch": 2.031298904538341,
|
4565 |
+
"grad_norm": NaN,
|
4566 |
+
"learning_rate": 2.390690035974321e-05,
|
4567 |
+
"loss": 0.0,
|
4568 |
+
"step": 649
|
4569 |
+
},
|
4570 |
+
{
|
4571 |
+
"epoch": 2.0344287949921753,
|
4572 |
+
"grad_norm": NaN,
|
4573 |
+
"learning_rate": 2.3765551456264618e-05,
|
4574 |
+
"loss": 0.0,
|
4575 |
+
"step": 650
|
4576 |
+
},
|
4577 |
+
{
|
4578 |
+
"epoch": 2.0375586854460095,
|
4579 |
+
"grad_norm": NaN,
|
4580 |
+
"learning_rate": 2.3624491269102844e-05,
|
4581 |
+
"loss": 0.0,
|
4582 |
+
"step": 651
|
4583 |
+
},
|
4584 |
+
{
|
4585 |
+
"epoch": 2.0406885758998436,
|
4586 |
+
"grad_norm": NaN,
|
4587 |
+
"learning_rate": 2.3483721350658593e-05,
|
4588 |
+
"loss": 0.0,
|
4589 |
+
"step": 652
|
4590 |
+
},
|
4591 |
+
{
|
4592 |
+
"epoch": 2.043818466353678,
|
4593 |
+
"grad_norm": NaN,
|
4594 |
+
"learning_rate": 2.334324325013821e-05,
|
4595 |
+
"loss": 0.0,
|
4596 |
+
"step": 653
|
4597 |
+
},
|
4598 |
+
{
|
4599 |
+
"epoch": 2.0469483568075115,
|
4600 |
+
"grad_norm": NaN,
|
4601 |
+
"learning_rate": 2.3203058513536364e-05,
|
4602 |
+
"loss": 0.0,
|
4603 |
+
"step": 654
|
4604 |
+
},
|
4605 |
+
{
|
4606 |
+
"epoch": 2.0500782472613457,
|
4607 |
+
"grad_norm": NaN,
|
4608 |
+
"learning_rate": 2.3063168683619325e-05,
|
4609 |
+
"loss": 0.0,
|
4610 |
+
"step": 655
|
4611 |
+
},
|
4612 |
+
{
|
4613 |
+
"epoch": 2.05320813771518,
|
4614 |
+
"grad_norm": NaN,
|
4615 |
+
"learning_rate": 2.2923575299907753e-05,
|
4616 |
+
"loss": 0.0,
|
4617 |
+
"step": 656
|
4618 |
+
},
|
4619 |
+
{
|
4620 |
+
"epoch": 2.056338028169014,
|
4621 |
+
"grad_norm": NaN,
|
4622 |
+
"learning_rate": 2.2784279898659914e-05,
|
4623 |
+
"loss": 0.0,
|
4624 |
+
"step": 657
|
4625 |
+
},
|
4626 |
+
{
|
4627 |
+
"epoch": 2.059467918622848,
|
4628 |
+
"grad_norm": NaN,
|
4629 |
+
"learning_rate": 2.2645284012854644e-05,
|
4630 |
+
"loss": 0.0,
|
4631 |
+
"step": 658
|
4632 |
+
},
|
4633 |
+
{
|
4634 |
+
"epoch": 2.0625978090766823,
|
4635 |
+
"grad_norm": NaN,
|
4636 |
+
"learning_rate": 2.2506589172174607e-05,
|
4637 |
+
"loss": 0.0,
|
4638 |
+
"step": 659
|
4639 |
+
},
|
4640 |
+
{
|
4641 |
+
"epoch": 2.0657276995305165,
|
4642 |
+
"grad_norm": NaN,
|
4643 |
+
"learning_rate": 2.2368196902989342e-05,
|
4644 |
+
"loss": 0.0,
|
4645 |
+
"step": 660
|
4646 |
+
},
|
4647 |
+
{
|
4648 |
+
"epoch": 2.0688575899843507,
|
4649 |
+
"grad_norm": NaN,
|
4650 |
+
"learning_rate": 2.2230108728338506e-05,
|
4651 |
+
"loss": 0.0,
|
4652 |
+
"step": 661
|
4653 |
+
},
|
4654 |
+
{
|
4655 |
+
"epoch": 2.071987480438185,
|
4656 |
+
"grad_norm": NaN,
|
4657 |
+
"learning_rate": 2.2092326167915194e-05,
|
4658 |
+
"loss": 0.0,
|
4659 |
+
"step": 662
|
4660 |
+
},
|
4661 |
+
{
|
4662 |
+
"epoch": 2.075117370892019,
|
4663 |
+
"grad_norm": NaN,
|
4664 |
+
"learning_rate": 2.1954850738049086e-05,
|
4665 |
+
"loss": 0.0,
|
4666 |
+
"step": 663
|
4667 |
+
},
|
4668 |
+
{
|
4669 |
+
"epoch": 2.0782472613458527,
|
4670 |
+
"grad_norm": NaN,
|
4671 |
+
"learning_rate": 2.181768395168981e-05,
|
4672 |
+
"loss": 0.0,
|
4673 |
+
"step": 664
|
4674 |
+
},
|
4675 |
+
{
|
4676 |
+
"epoch": 2.081377151799687,
|
4677 |
+
"grad_norm": NaN,
|
4678 |
+
"learning_rate": 2.1680827318390355e-05,
|
4679 |
+
"loss": 0.0,
|
4680 |
+
"step": 665
|
4681 |
+
},
|
4682 |
+
{
|
4683 |
+
"epoch": 2.084507042253521,
|
4684 |
+
"grad_norm": NaN,
|
4685 |
+
"learning_rate": 2.1544282344290362e-05,
|
4686 |
+
"loss": 0.0,
|
4687 |
+
"step": 666
|
4688 |
+
},
|
4689 |
+
{
|
4690 |
+
"epoch": 2.087636932707355,
|
4691 |
+
"grad_norm": NaN,
|
4692 |
+
"learning_rate": 2.1408050532099568e-05,
|
4693 |
+
"loss": 0.0,
|
4694 |
+
"step": 667
|
4695 |
+
},
|
4696 |
+
{
|
4697 |
+
"epoch": 2.0907668231611893,
|
4698 |
+
"grad_norm": NaN,
|
4699 |
+
"learning_rate": 2.1272133381081356e-05,
|
4700 |
+
"loss": 0.0,
|
4701 |
+
"step": 668
|
4702 |
+
},
|
4703 |
+
{
|
4704 |
+
"epoch": 2.0938967136150235,
|
4705 |
+
"grad_norm": NaN,
|
4706 |
+
"learning_rate": 2.1136532387036107e-05,
|
4707 |
+
"loss": 0.0,
|
4708 |
+
"step": 669
|
4709 |
+
},
|
4710 |
+
{
|
4711 |
+
"epoch": 2.0970266040688577,
|
4712 |
+
"grad_norm": NaN,
|
4713 |
+
"learning_rate": 2.100124904228491e-05,
|
4714 |
+
"loss": 0.0,
|
4715 |
+
"step": 670
|
4716 |
+
},
|
4717 |
+
{
|
4718 |
+
"epoch": 2.100156494522692,
|
4719 |
+
"grad_norm": NaN,
|
4720 |
+
"learning_rate": 2.0866284835652955e-05,
|
4721 |
+
"loss": 0.0,
|
4722 |
+
"step": 671
|
4723 |
+
},
|
4724 |
+
{
|
4725 |
+
"epoch": 2.103286384976526,
|
4726 |
+
"grad_norm": NaN,
|
4727 |
+
"learning_rate": 2.0731641252453344e-05,
|
4728 |
+
"loss": 0.0,
|
4729 |
+
"step": 672
|
4730 |
+
},
|
4731 |
+
{
|
4732 |
+
"epoch": 2.10641627543036,
|
4733 |
+
"grad_norm": NaN,
|
4734 |
+
"learning_rate": 2.0597319774470507e-05,
|
4735 |
+
"loss": 0.0,
|
4736 |
+
"step": 673
|
4737 |
+
},
|
4738 |
+
{
|
4739 |
+
"epoch": 2.109546165884194,
|
4740 |
+
"grad_norm": NaN,
|
4741 |
+
"learning_rate": 2.046332187994417e-05,
|
4742 |
+
"loss": 0.0,
|
4743 |
+
"step": 674
|
4744 |
+
},
|
4745 |
+
{
|
4746 |
+
"epoch": 2.112676056338028,
|
4747 |
+
"grad_norm": NaN,
|
4748 |
+
"learning_rate": 2.0329649043552845e-05,
|
4749 |
+
"loss": 0.0,
|
4750 |
+
"step": 675
|
4751 |
+
},
|
4752 |
+
{
|
4753 |
+
"epoch": 2.115805946791862,
|
4754 |
+
"grad_norm": NaN,
|
4755 |
+
"learning_rate": 2.0196302736397777e-05,
|
4756 |
+
"loss": 0.0,
|
4757 |
+
"step": 676
|
4758 |
+
},
|
4759 |
+
{
|
4760 |
+
"epoch": 2.1189358372456963,
|
4761 |
+
"grad_norm": NaN,
|
4762 |
+
"learning_rate": 2.0063284425986606e-05,
|
4763 |
+
"loss": 0.0,
|
4764 |
+
"step": 677
|
4765 |
+
},
|
4766 |
+
{
|
4767 |
+
"epoch": 2.1220657276995305,
|
4768 |
+
"grad_norm": NaN,
|
4769 |
+
"learning_rate": 1.9930595576217383e-05,
|
4770 |
+
"loss": 0.0,
|
4771 |
+
"step": 678
|
4772 |
+
},
|
4773 |
+
{
|
4774 |
+
"epoch": 2.1251956181533647,
|
4775 |
+
"grad_norm": NaN,
|
4776 |
+
"learning_rate": 1.9798237647362244e-05,
|
4777 |
+
"loss": 0.0,
|
4778 |
+
"step": 679
|
4779 |
+
},
|
4780 |
+
{
|
4781 |
+
"epoch": 2.128325508607199,
|
4782 |
+
"grad_norm": NaN,
|
4783 |
+
"learning_rate": 1.9666212096051572e-05,
|
4784 |
+
"loss": 0.0,
|
4785 |
+
"step": 680
|
4786 |
+
},
|
4787 |
+
{
|
4788 |
+
"epoch": 2.131455399061033,
|
4789 |
+
"grad_norm": NaN,
|
4790 |
+
"learning_rate": 1.9534520375257793e-05,
|
4791 |
+
"loss": 0.0,
|
4792 |
+
"step": 681
|
4793 |
+
},
|
4794 |
+
{
|
4795 |
+
"epoch": 2.134585289514867,
|
4796 |
+
"grad_norm": NaN,
|
4797 |
+
"learning_rate": 1.9403163934279495e-05,
|
4798 |
+
"loss": 0.0,
|
4799 |
+
"step": 682
|
4800 |
+
},
|
4801 |
+
{
|
4802 |
+
"epoch": 2.1377151799687013,
|
4803 |
+
"grad_norm": NaN,
|
4804 |
+
"learning_rate": 1.9272144218725376e-05,
|
4805 |
+
"loss": 0.0,
|
4806 |
+
"step": 683
|
4807 |
+
},
|
4808 |
+
{
|
4809 |
+
"epoch": 2.140845070422535,
|
4810 |
+
"grad_norm": NaN,
|
4811 |
+
"learning_rate": 1.9141462670498456e-05,
|
4812 |
+
"loss": 0.0,
|
4813 |
+
"step": 684
|
4814 |
+
},
|
4815 |
+
{
|
4816 |
+
"epoch": 2.143974960876369,
|
4817 |
+
"grad_norm": NaN,
|
4818 |
+
"learning_rate": 1.9011120727780096e-05,
|
4819 |
+
"loss": 0.0,
|
4820 |
+
"step": 685
|
4821 |
+
},
|
4822 |
+
{
|
4823 |
+
"epoch": 2.1471048513302033,
|
4824 |
+
"grad_norm": NaN,
|
4825 |
+
"learning_rate": 1.8881119825014227e-05,
|
4826 |
+
"loss": 0.0,
|
4827 |
+
"step": 686
|
4828 |
+
},
|
4829 |
+
{
|
4830 |
+
"epoch": 2.1502347417840375,
|
4831 |
+
"grad_norm": NaN,
|
4832 |
+
"learning_rate": 1.8751461392891545e-05,
|
4833 |
+
"loss": 0.0,
|
4834 |
+
"step": 687
|
4835 |
+
},
|
4836 |
+
{
|
4837 |
+
"epoch": 2.1533646322378717,
|
4838 |
+
"grad_norm": NaN,
|
4839 |
+
"learning_rate": 1.862214685833383e-05,
|
4840 |
+
"loss": 0.0,
|
4841 |
+
"step": 688
|
4842 |
+
},
|
4843 |
+
{
|
4844 |
+
"epoch": 2.156494522691706,
|
4845 |
+
"grad_norm": NaN,
|
4846 |
+
"learning_rate": 1.8493177644478098e-05,
|
4847 |
+
"loss": 0.0,
|
4848 |
+
"step": 689
|
4849 |
+
},
|
4850 |
+
{
|
4851 |
+
"epoch": 2.15962441314554,
|
4852 |
+
"grad_norm": NaN,
|
4853 |
+
"learning_rate": 1.836455517066113e-05,
|
4854 |
+
"loss": 0.0,
|
4855 |
+
"step": 690
|
4856 |
+
},
|
4857 |
+
{
|
4858 |
+
"epoch": 2.162754303599374,
|
4859 |
+
"grad_norm": NaN,
|
4860 |
+
"learning_rate": 1.823628085240367e-05,
|
4861 |
+
"loss": 0.0,
|
4862 |
+
"step": 691
|
4863 |
+
},
|
4864 |
+
{
|
4865 |
+
"epoch": 2.1658841940532083,
|
4866 |
+
"grad_norm": NaN,
|
4867 |
+
"learning_rate": 1.8108356101394964e-05,
|
4868 |
+
"loss": 0.0,
|
4869 |
+
"step": 692
|
4870 |
+
},
|
4871 |
+
{
|
4872 |
+
"epoch": 2.169014084507042,
|
4873 |
+
"grad_norm": NaN,
|
4874 |
+
"learning_rate": 1.7980782325477163e-05,
|
4875 |
+
"loss": 0.0,
|
4876 |
+
"step": 693
|
4877 |
+
},
|
4878 |
+
{
|
4879 |
+
"epoch": 2.172143974960876,
|
4880 |
+
"grad_norm": NaN,
|
4881 |
+
"learning_rate": 1.7853560928629903e-05,
|
4882 |
+
"loss": 0.0,
|
4883 |
+
"step": 694
|
4884 |
+
},
|
4885 |
+
{
|
4886 |
+
"epoch": 2.1752738654147104,
|
4887 |
+
"grad_norm": NaN,
|
4888 |
+
"learning_rate": 1.7726693310954745e-05,
|
4889 |
+
"loss": 0.0,
|
4890 |
+
"step": 695
|
4891 |
+
},
|
4892 |
+
{
|
4893 |
+
"epoch": 2.1784037558685445,
|
4894 |
+
"grad_norm": NaN,
|
4895 |
+
"learning_rate": 1.7600180868659887e-05,
|
4896 |
+
"loss": 0.0,
|
4897 |
+
"step": 696
|
4898 |
+
},
|
4899 |
+
{
|
4900 |
+
"epoch": 2.1815336463223787,
|
4901 |
+
"grad_norm": NaN,
|
4902 |
+
"learning_rate": 1.7474024994044685e-05,
|
4903 |
+
"loss": 0.0,
|
4904 |
+
"step": 697
|
4905 |
+
},
|
4906 |
+
{
|
4907 |
+
"epoch": 2.184663536776213,
|
4908 |
+
"grad_norm": NaN,
|
4909 |
+
"learning_rate": 1.7348227075484396e-05,
|
4910 |
+
"loss": 0.0,
|
4911 |
+
"step": 698
|
4912 |
+
},
|
4913 |
+
{
|
4914 |
+
"epoch": 2.187793427230047,
|
4915 |
+
"grad_norm": NaN,
|
4916 |
+
"learning_rate": 1.7222788497414922e-05,
|
4917 |
+
"loss": 0.0,
|
4918 |
+
"step": 699
|
4919 |
+
},
|
4920 |
+
{
|
4921 |
+
"epoch": 2.190923317683881,
|
4922 |
+
"grad_norm": NaN,
|
4923 |
+
"learning_rate": 1.709771064031747e-05,
|
4924 |
+
"loss": 0.0,
|
4925 |
+
"step": 700
|
4926 |
+
},
|
4927 |
+
{
|
4928 |
+
"epoch": 2.1940532081377153,
|
4929 |
+
"grad_norm": NaN,
|
4930 |
+
"learning_rate": 1.697299488070352e-05,
|
4931 |
+
"loss": 0.0,
|
4932 |
+
"step": 701
|
4933 |
+
},
|
4934 |
+
{
|
4935 |
+
"epoch": 2.1971830985915495,
|
4936 |
+
"grad_norm": NaN,
|
4937 |
+
"learning_rate": 1.6848642591099477e-05,
|
4938 |
+
"loss": 0.0,
|
4939 |
+
"step": 702
|
4940 |
+
},
|
4941 |
+
{
|
4942 |
+
"epoch": 2.2003129890453836,
|
4943 |
+
"grad_norm": NaN,
|
4944 |
+
"learning_rate": 1.6724655140031774e-05,
|
4945 |
+
"loss": 0.0,
|
4946 |
+
"step": 703
|
4947 |
+
},
|
4948 |
+
{
|
4949 |
+
"epoch": 2.2034428794992174,
|
4950 |
+
"grad_norm": NaN,
|
4951 |
+
"learning_rate": 1.6601033892011643e-05,
|
4952 |
+
"loss": 0.0,
|
4953 |
+
"step": 704
|
4954 |
+
},
|
4955 |
+
{
|
4956 |
+
"epoch": 2.2065727699530515,
|
4957 |
+
"grad_norm": NaN,
|
4958 |
+
"learning_rate": 1.6477780207520177e-05,
|
4959 |
+
"loss": 0.0,
|
4960 |
+
"step": 705
|
4961 |
+
},
|
4962 |
+
{
|
4963 |
+
"epoch": 2.2097026604068857,
|
4964 |
+
"grad_norm": NaN,
|
4965 |
+
"learning_rate": 1.635489544299334e-05,
|
4966 |
+
"loss": 0.0,
|
4967 |
+
"step": 706
|
4968 |
+
},
|
4969 |
+
{
|
4970 |
+
"epoch": 2.21283255086072,
|
4971 |
+
"grad_norm": NaN,
|
4972 |
+
"learning_rate": 1.623238095080709e-05,
|
4973 |
+
"loss": 0.0,
|
4974 |
+
"step": 707
|
4975 |
+
},
|
4976 |
+
{
|
4977 |
+
"epoch": 2.215962441314554,
|
4978 |
+
"grad_norm": NaN,
|
4979 |
+
"learning_rate": 1.6110238079262386e-05,
|
4980 |
+
"loss": 0.0,
|
4981 |
+
"step": 708
|
4982 |
+
},
|
4983 |
+
{
|
4984 |
+
"epoch": 2.219092331768388,
|
4985 |
+
"grad_norm": NaN,
|
4986 |
+
"learning_rate": 1.598846817257047e-05,
|
4987 |
+
"loss": 0.0,
|
4988 |
+
"step": 709
|
4989 |
+
},
|
4990 |
+
{
|
4991 |
+
"epoch": 2.2222222222222223,
|
4992 |
+
"grad_norm": NaN,
|
4993 |
+
"learning_rate": 1.5867072570837983e-05,
|
4994 |
+
"loss": 0.0,
|
4995 |
+
"step": 710
|
4996 |
+
},
|
4997 |
+
{
|
4998 |
+
"epoch": 2.2253521126760565,
|
4999 |
+
"grad_norm": NaN,
|
5000 |
+
"learning_rate": 1.574605261005225e-05,
|
5001 |
+
"loss": 0.0,
|
5002 |
+
"step": 711
|
5003 |
+
},
|
5004 |
+
{
|
5005 |
+
"epoch": 2.2284820031298906,
|
5006 |
+
"grad_norm": NaN,
|
5007 |
+
"learning_rate": 1.562540962206659e-05,
|
5008 |
+
"loss": 0.0,
|
5009 |
+
"step": 712
|
5010 |
+
},
|
5011 |
+
{
|
5012 |
+
"epoch": 2.2316118935837244,
|
5013 |
+
"grad_norm": NaN,
|
5014 |
+
"learning_rate": 1.5505144934585663e-05,
|
5015 |
+
"loss": 0.0,
|
5016 |
+
"step": 713
|
5017 |
+
},
|
5018 |
+
{
|
5019 |
+
"epoch": 2.2347417840375585,
|
5020 |
+
"grad_norm": NaN,
|
5021 |
+
"learning_rate": 1.5385259871150802e-05,
|
5022 |
+
"loss": 0.0,
|
5023 |
+
"step": 714
|
5024 |
+
},
|
5025 |
+
{
|
5026 |
+
"epoch": 2.2378716744913927,
|
5027 |
+
"grad_norm": NaN,
|
5028 |
+
"learning_rate": 1.5265755751125544e-05,
|
5029 |
+
"loss": 0.0,
|
5030 |
+
"step": 715
|
5031 |
+
},
|
5032 |
+
{
|
5033 |
+
"epoch": 2.241001564945227,
|
5034 |
+
"grad_norm": NaN,
|
5035 |
+
"learning_rate": 1.5146633889680994e-05,
|
5036 |
+
"loss": 0.0,
|
5037 |
+
"step": 716
|
5038 |
+
},
|
5039 |
+
{
|
5040 |
+
"epoch": 2.244131455399061,
|
5041 |
+
"grad_norm": NaN,
|
5042 |
+
"learning_rate": 1.5027895597781444e-05,
|
5043 |
+
"loss": 0.0,
|
5044 |
+
"step": 717
|
5045 |
+
},
|
5046 |
+
{
|
5047 |
+
"epoch": 2.247261345852895,
|
5048 |
+
"grad_norm": NaN,
|
5049 |
+
"learning_rate": 1.4909542182169889e-05,
|
5050 |
+
"loss": 0.0,
|
5051 |
+
"step": 718
|
5052 |
+
},
|
5053 |
+
{
|
5054 |
+
"epoch": 2.2503912363067293,
|
5055 |
+
"grad_norm": NaN,
|
5056 |
+
"learning_rate": 1.4791574945353726e-05,
|
5057 |
+
"loss": 0.0,
|
5058 |
+
"step": 719
|
5059 |
+
},
|
5060 |
+
{
|
5061 |
+
"epoch": 2.2535211267605635,
|
5062 |
+
"grad_norm": NaN,
|
5063 |
+
"learning_rate": 1.4673995185590277e-05,
|
5064 |
+
"loss": 0.0,
|
5065 |
+
"step": 720
|
5066 |
+
},
|
5067 |
+
{
|
5068 |
+
"epoch": 2.2566510172143976,
|
5069 |
+
"grad_norm": NaN,
|
5070 |
+
"learning_rate": 1.455680419687267e-05,
|
5071 |
+
"loss": 0.0,
|
5072 |
+
"step": 721
|
5073 |
+
},
|
5074 |
+
{
|
5075 |
+
"epoch": 2.2597809076682314,
|
5076 |
+
"grad_norm": NaN,
|
5077 |
+
"learning_rate": 1.444000326891547e-05,
|
5078 |
+
"loss": 0.0,
|
5079 |
+
"step": 722
|
5080 |
+
},
|
5081 |
+
{
|
5082 |
+
"epoch": 2.262910798122066,
|
5083 |
+
"grad_norm": NaN,
|
5084 |
+
"learning_rate": 1.432359368714053e-05,
|
5085 |
+
"loss": 0.0,
|
5086 |
+
"step": 723
|
5087 |
+
},
|
5088 |
+
{
|
5089 |
+
"epoch": 2.2660406885758997,
|
5090 |
+
"grad_norm": NaN,
|
5091 |
+
"learning_rate": 1.4207576732662847e-05,
|
5092 |
+
"loss": 0.0,
|
5093 |
+
"step": 724
|
5094 |
+
},
|
5095 |
+
{
|
5096 |
+
"epoch": 2.269170579029734,
|
5097 |
+
"grad_norm": NaN,
|
5098 |
+
"learning_rate": 1.4091953682276494e-05,
|
5099 |
+
"loss": 0.0,
|
5100 |
+
"step": 725
|
5101 |
+
},
|
5102 |
+
{
|
5103 |
+
"epoch": 2.272300469483568,
|
5104 |
+
"grad_norm": NaN,
|
5105 |
+
"learning_rate": 1.3976725808440488e-05,
|
5106 |
+
"loss": 0.0,
|
5107 |
+
"step": 726
|
5108 |
+
},
|
5109 |
+
{
|
5110 |
+
"epoch": 2.275430359937402,
|
5111 |
+
"grad_norm": NaN,
|
5112 |
+
"learning_rate": 1.386189437926489e-05,
|
5113 |
+
"loss": 0.0,
|
5114 |
+
"step": 727
|
5115 |
+
},
|
5116 |
+
{
|
5117 |
+
"epoch": 2.2785602503912363,
|
5118 |
+
"grad_norm": NaN,
|
5119 |
+
"learning_rate": 1.3747460658496731e-05,
|
5120 |
+
"loss": 0.0,
|
5121 |
+
"step": 728
|
5122 |
+
},
|
5123 |
+
{
|
5124 |
+
"epoch": 2.2816901408450705,
|
5125 |
+
"grad_norm": NaN,
|
5126 |
+
"learning_rate": 1.3633425905506219e-05,
|
5127 |
+
"loss": 0.0,
|
5128 |
+
"step": 729
|
5129 |
+
},
|
5130 |
+
{
|
5131 |
+
"epoch": 2.2848200312989047,
|
5132 |
+
"grad_norm": NaN,
|
5133 |
+
"learning_rate": 1.3519791375272805e-05,
|
5134 |
+
"loss": 0.0,
|
5135 |
+
"step": 730
|
5136 |
+
},
|
5137 |
+
{
|
5138 |
+
"epoch": 2.287949921752739,
|
5139 |
+
"grad_norm": NaN,
|
5140 |
+
"learning_rate": 1.3406558318371377e-05,
|
5141 |
+
"loss": 0.0,
|
5142 |
+
"step": 731
|
5143 |
+
},
|
5144 |
+
{
|
5145 |
+
"epoch": 2.291079812206573,
|
5146 |
+
"grad_norm": NaN,
|
5147 |
+
"learning_rate": 1.329372798095856e-05,
|
5148 |
+
"loss": 0.0,
|
5149 |
+
"step": 732
|
5150 |
+
},
|
5151 |
+
{
|
5152 |
+
"epoch": 2.2942097026604067,
|
5153 |
+
"grad_norm": NaN,
|
5154 |
+
"learning_rate": 1.3181301604758895e-05,
|
5155 |
+
"loss": 0.0,
|
5156 |
+
"step": 733
|
5157 |
+
},
|
5158 |
+
{
|
5159 |
+
"epoch": 2.297339593114241,
|
5160 |
+
"grad_norm": NaN,
|
5161 |
+
"learning_rate": 1.30692804270513e-05,
|
5162 |
+
"loss": 0.0,
|
5163 |
+
"step": 734
|
5164 |
+
},
|
5165 |
+
{
|
5166 |
+
"epoch": 2.300469483568075,
|
5167 |
+
"grad_norm": NaN,
|
5168 |
+
"learning_rate": 1.295766568065534e-05,
|
5169 |
+
"loss": 0.0,
|
5170 |
+
"step": 735
|
5171 |
+
},
|
5172 |
+
{
|
5173 |
+
"epoch": 2.303599374021909,
|
5174 |
+
"grad_norm": NaN,
|
5175 |
+
"learning_rate": 1.284645859391772e-05,
|
5176 |
+
"loss": 0.0,
|
5177 |
+
"step": 736
|
5178 |
+
},
|
5179 |
+
{
|
5180 |
+
"epoch": 2.3067292644757433,
|
5181 |
+
"grad_norm": NaN,
|
5182 |
+
"learning_rate": 1.2735660390698744e-05,
|
5183 |
+
"loss": 0.0,
|
5184 |
+
"step": 737
|
5185 |
+
},
|
5186 |
+
{
|
5187 |
+
"epoch": 2.3098591549295775,
|
5188 |
+
"grad_norm": NaN,
|
5189 |
+
"learning_rate": 1.2625272290358902e-05,
|
5190 |
+
"loss": 0.0,
|
5191 |
+
"step": 738
|
5192 |
+
},
|
5193 |
+
{
|
5194 |
+
"epoch": 2.3129890453834117,
|
5195 |
+
"grad_norm": NaN,
|
5196 |
+
"learning_rate": 1.2515295507745328e-05,
|
5197 |
+
"loss": 0.0,
|
5198 |
+
"step": 739
|
5199 |
+
},
|
5200 |
+
{
|
5201 |
+
"epoch": 2.316118935837246,
|
5202 |
+
"grad_norm": NaN,
|
5203 |
+
"learning_rate": 1.2405731253178594e-05,
|
5204 |
+
"loss": 0.0,
|
5205 |
+
"step": 740
|
5206 |
+
},
|
5207 |
+
{
|
5208 |
+
"epoch": 2.31924882629108,
|
5209 |
+
"grad_norm": NaN,
|
5210 |
+
"learning_rate": 1.2296580732439234e-05,
|
5211 |
+
"loss": 0.0,
|
5212 |
+
"step": 741
|
5213 |
+
},
|
5214 |
+
{
|
5215 |
+
"epoch": 2.3223787167449137,
|
5216 |
+
"grad_norm": NaN,
|
5217 |
+
"learning_rate": 1.2187845146754572e-05,
|
5218 |
+
"loss": 0.0,
|
5219 |
+
"step": 742
|
5220 |
+
},
|
5221 |
+
{
|
5222 |
+
"epoch": 2.325508607198748,
|
5223 |
+
"grad_norm": NaN,
|
5224 |
+
"learning_rate": 1.207952569278546e-05,
|
5225 |
+
"loss": 0.0,
|
5226 |
+
"step": 743
|
5227 |
+
},
|
5228 |
+
{
|
5229 |
+
"epoch": 2.328638497652582,
|
5230 |
+
"grad_norm": NaN,
|
5231 |
+
"learning_rate": 1.1971623562613155e-05,
|
5232 |
+
"loss": 0.0,
|
5233 |
+
"step": 744
|
5234 |
+
},
|
5235 |
+
{
|
5236 |
+
"epoch": 2.331768388106416,
|
5237 |
+
"grad_norm": NaN,
|
5238 |
+
"learning_rate": 1.186413994372611e-05,
|
5239 |
+
"loss": 0.0,
|
5240 |
+
"step": 745
|
5241 |
+
},
|
5242 |
+
{
|
5243 |
+
"epoch": 2.3348982785602503,
|
5244 |
+
"grad_norm": NaN,
|
5245 |
+
"learning_rate": 1.1757076019007035e-05,
|
5246 |
+
"loss": 0.0,
|
5247 |
+
"step": 746
|
5248 |
+
},
|
5249 |
+
{
|
5250 |
+
"epoch": 2.3380281690140845,
|
5251 |
+
"grad_norm": NaN,
|
5252 |
+
"learning_rate": 1.1650432966719748e-05,
|
5253 |
+
"loss": 0.0,
|
5254 |
+
"step": 747
|
5255 |
+
},
|
5256 |
+
{
|
5257 |
+
"epoch": 2.3411580594679187,
|
5258 |
+
"grad_norm": NaN,
|
5259 |
+
"learning_rate": 1.1544211960496282e-05,
|
5260 |
+
"loss": 0.0,
|
5261 |
+
"step": 748
|
5262 |
+
},
|
5263 |
+
{
|
5264 |
+
"epoch": 2.344287949921753,
|
5265 |
+
"grad_norm": NaN,
|
5266 |
+
"learning_rate": 1.1438414169323947e-05,
|
5267 |
+
"loss": 0.0,
|
5268 |
+
"step": 749
|
5269 |
+
},
|
5270 |
+
{
|
5271 |
+
"epoch": 2.347417840375587,
|
5272 |
+
"grad_norm": NaN,
|
5273 |
+
"learning_rate": 1.133304075753251e-05,
|
5274 |
+
"loss": 0.0,
|
5275 |
+
"step": 750
|
5276 |
+
},
|
5277 |
+
{
|
5278 |
+
"epoch": 2.350547730829421,
|
5279 |
+
"grad_norm": NaN,
|
5280 |
+
"learning_rate": 1.1228092884781282e-05,
|
5281 |
+
"loss": 0.0,
|
5282 |
+
"step": 751
|
5283 |
+
},
|
5284 |
+
{
|
5285 |
+
"epoch": 2.3536776212832553,
|
5286 |
+
"grad_norm": NaN,
|
5287 |
+
"learning_rate": 1.1123571706046477e-05,
|
5288 |
+
"loss": 0.0,
|
5289 |
+
"step": 752
|
5290 |
+
},
|
5291 |
+
{
|
5292 |
+
"epoch": 2.356807511737089,
|
5293 |
+
"grad_norm": NaN,
|
5294 |
+
"learning_rate": 1.1019478371608377e-05,
|
5295 |
+
"loss": 0.0,
|
5296 |
+
"step": 753
|
5297 |
+
},
|
5298 |
+
{
|
5299 |
+
"epoch": 2.359937402190923,
|
5300 |
+
"grad_norm": NaN,
|
5301 |
+
"learning_rate": 1.0915814027038823e-05,
|
5302 |
+
"loss": 0.0,
|
5303 |
+
"step": 754
|
5304 |
+
},
|
5305 |
+
{
|
5306 |
+
"epoch": 2.3630672926447573,
|
5307 |
+
"grad_norm": NaN,
|
5308 |
+
"learning_rate": 1.0812579813188406e-05,
|
5309 |
+
"loss": 0.0,
|
5310 |
+
"step": 755
|
5311 |
+
},
|
5312 |
+
{
|
5313 |
+
"epoch": 2.3661971830985915,
|
5314 |
+
"grad_norm": NaN,
|
5315 |
+
"learning_rate": 1.0709776866174126e-05,
|
5316 |
+
"loss": 0.0,
|
5317 |
+
"step": 756
|
5318 |
+
},
|
5319 |
+
{
|
5320 |
+
"epoch": 2.3693270735524257,
|
5321 |
+
"grad_norm": NaN,
|
5322 |
+
"learning_rate": 1.0607406317366726e-05,
|
5323 |
+
"loss": 0.0,
|
5324 |
+
"step": 757
|
5325 |
+
},
|
5326 |
+
{
|
5327 |
+
"epoch": 2.37245696400626,
|
5328 |
+
"grad_norm": NaN,
|
5329 |
+
"learning_rate": 1.0505469293378362e-05,
|
5330 |
+
"loss": 0.0,
|
5331 |
+
"step": 758
|
5332 |
+
},
|
5333 |
+
{
|
5334 |
+
"epoch": 2.375586854460094,
|
5335 |
+
"grad_norm": NaN,
|
5336 |
+
"learning_rate": 1.0403966916050089e-05,
|
5337 |
+
"loss": 0.0,
|
5338 |
+
"step": 759
|
5339 |
+
},
|
5340 |
+
{
|
5341 |
+
"epoch": 2.378716744913928,
|
5342 |
+
"grad_norm": NaN,
|
5343 |
+
"learning_rate": 1.0302900302439617e-05,
|
5344 |
+
"loss": 0.0,
|
5345 |
+
"step": 760
|
5346 |
+
},
|
5347 |
+
{
|
5348 |
+
"epoch": 2.3818466353677623,
|
5349 |
+
"grad_norm": NaN,
|
5350 |
+
"learning_rate": 1.0202270564808947e-05,
|
5351 |
+
"loss": 0.0,
|
5352 |
+
"step": 761
|
5353 |
+
},
|
5354 |
+
{
|
5355 |
+
"epoch": 2.384976525821596,
|
5356 |
+
"grad_norm": NaN,
|
5357 |
+
"learning_rate": 1.0102078810612148e-05,
|
5358 |
+
"loss": 0.0,
|
5359 |
+
"step": 762
|
5360 |
+
},
|
5361 |
+
{
|
5362 |
+
"epoch": 2.38810641627543,
|
5363 |
+
"grad_norm": NaN,
|
5364 |
+
"learning_rate": 1.0002326142483226e-05,
|
5365 |
+
"loss": 0.0,
|
5366 |
+
"step": 763
|
5367 |
+
},
|
5368 |
+
{
|
5369 |
+
"epoch": 2.3912363067292643,
|
5370 |
+
"grad_norm": NaN,
|
5371 |
+
"learning_rate": 9.903013658223886e-06,
|
5372 |
+
"loss": 0.0,
|
5373 |
+
"step": 764
|
5374 |
+
},
|
5375 |
+
{
|
5376 |
+
"epoch": 2.3943661971830985,
|
5377 |
+
"grad_norm": NaN,
|
5378 |
+
"learning_rate": 9.804142450791526e-06,
|
5379 |
+
"loss": 0.0,
|
5380 |
+
"step": 765
|
5381 |
+
},
|
5382 |
+
{
|
5383 |
+
"epoch": 2.3974960876369327,
|
5384 |
+
"grad_norm": NaN,
|
5385 |
+
"learning_rate": 9.705713608287204e-06,
|
5386 |
+
"loss": 0.0,
|
5387 |
+
"step": 766
|
5388 |
+
},
|
5389 |
+
{
|
5390 |
+
"epoch": 2.400625978090767,
|
5391 |
+
"grad_norm": NaN,
|
5392 |
+
"learning_rate": 9.607728213943629e-06,
|
5393 |
+
"loss": 0.0,
|
5394 |
+
"step": 767
|
5395 |
+
},
|
5396 |
+
{
|
5397 |
+
"epoch": 2.403755868544601,
|
5398 |
+
"grad_norm": NaN,
|
5399 |
+
"learning_rate": 9.510187346113248e-06,
|
5400 |
+
"loss": 0.0,
|
5401 |
+
"step": 768
|
5402 |
+
},
|
5403 |
+
{
|
5404 |
+
"epoch": 2.406885758998435,
|
5405 |
+
"grad_norm": NaN,
|
5406 |
+
"learning_rate": 9.413092078256425e-06,
|
5407 |
+
"loss": 0.0,
|
5408 |
+
"step": 769
|
5409 |
+
},
|
5410 |
+
{
|
5411 |
+
"epoch": 2.4100156494522693,
|
5412 |
+
"grad_norm": NaN,
|
5413 |
+
"learning_rate": 9.316443478929542e-06,
|
5414 |
+
"loss": 0.0,
|
5415 |
+
"step": 770
|
5416 |
+
},
|
5417 |
+
{
|
5418 |
+
"epoch": 2.4131455399061035,
|
5419 |
+
"grad_norm": NaN,
|
5420 |
+
"learning_rate": 9.220242611773344e-06,
|
5421 |
+
"loss": 0.0,
|
5422 |
+
"step": 771
|
5423 |
+
},
|
5424 |
+
{
|
5425 |
+
"epoch": 2.4162754303599376,
|
5426 |
+
"grad_norm": NaN,
|
5427 |
+
"learning_rate": 9.124490535501134e-06,
|
5428 |
+
"loss": 0.0,
|
5429 |
+
"step": 772
|
5430 |
+
},
|
5431 |
+
{
|
5432 |
+
"epoch": 2.4194053208137714,
|
5433 |
+
"grad_norm": NaN,
|
5434 |
+
"learning_rate": 9.029188303887182e-06,
|
5435 |
+
"loss": 0.0,
|
5436 |
+
"step": 773
|
5437 |
+
},
|
5438 |
+
{
|
5439 |
+
"epoch": 2.4225352112676055,
|
5440 |
+
"grad_norm": NaN,
|
5441 |
+
"learning_rate": 8.934336965755091e-06,
|
5442 |
+
"loss": 0.0,
|
5443 |
+
"step": 774
|
5444 |
+
},
|
5445 |
+
{
|
5446 |
+
"epoch": 2.4256651017214397,
|
5447 |
+
"grad_norm": NaN,
|
5448 |
+
"learning_rate": 8.839937564966317e-06,
|
5449 |
+
"loss": 0.0,
|
5450 |
+
"step": 775
|
5451 |
+
},
|
5452 |
+
{
|
5453 |
+
"epoch": 2.428794992175274,
|
5454 |
+
"grad_norm": NaN,
|
5455 |
+
"learning_rate": 8.745991140408589e-06,
|
5456 |
+
"loss": 0.0,
|
5457 |
+
"step": 776
|
5458 |
+
},
|
5459 |
+
{
|
5460 |
+
"epoch": 2.431924882629108,
|
5461 |
+
"grad_norm": NaN,
|
5462 |
+
"learning_rate": 8.652498725984575e-06,
|
5463 |
+
"loss": 0.0,
|
5464 |
+
"step": 777
|
5465 |
+
},
|
5466 |
+
{
|
5467 |
+
"epoch": 2.435054773082942,
|
5468 |
+
"grad_norm": NaN,
|
5469 |
+
"learning_rate": 8.559461350600406e-06,
|
5470 |
+
"loss": 0.0,
|
5471 |
+
"step": 778
|
5472 |
+
},
|
5473 |
+
{
|
5474 |
+
"epoch": 2.4381846635367763,
|
5475 |
+
"grad_norm": NaN,
|
5476 |
+
"learning_rate": 8.466880038154457e-06,
|
5477 |
+
"loss": 0.0,
|
5478 |
+
"step": 779
|
5479 |
+
},
|
5480 |
+
{
|
5481 |
+
"epoch": 2.4413145539906105,
|
5482 |
+
"grad_norm": NaN,
|
5483 |
+
"learning_rate": 8.374755807525936e-06,
|
5484 |
+
"loss": 0.0,
|
5485 |
+
"step": 780
|
5486 |
+
},
|
5487 |
+
{
|
5488 |
+
"epoch": 2.4444444444444446,
|
5489 |
+
"grad_norm": NaN,
|
5490 |
+
"learning_rate": 8.283089672563854e-06,
|
5491 |
+
"loss": 0.0,
|
5492 |
+
"step": 781
|
5493 |
+
},
|
5494 |
+
{
|
5495 |
+
"epoch": 2.4475743348982784,
|
5496 |
+
"grad_norm": NaN,
|
5497 |
+
"learning_rate": 8.191882642075698e-06,
|
5498 |
+
"loss": 0.0,
|
5499 |
+
"step": 782
|
5500 |
+
},
|
5501 |
+
{
|
5502 |
+
"epoch": 2.4507042253521125,
|
5503 |
+
"grad_norm": NaN,
|
5504 |
+
"learning_rate": 8.101135719816461e-06,
|
5505 |
+
"loss": 0.0,
|
5506 |
+
"step": 783
|
5507 |
+
},
|
5508 |
+
{
|
5509 |
+
"epoch": 2.4538341158059467,
|
5510 |
+
"grad_norm": NaN,
|
5511 |
+
"learning_rate": 8.010849904477468e-06,
|
5512 |
+
"loss": 0.0,
|
5513 |
+
"step": 784
|
5514 |
+
},
|
5515 |
+
{
|
5516 |
+
"epoch": 2.456964006259781,
|
5517 |
+
"grad_norm": NaN,
|
5518 |
+
"learning_rate": 7.92102618967554e-06,
|
5519 |
+
"loss": 0.0,
|
5520 |
+
"step": 785
|
5521 |
+
},
|
5522 |
+
{
|
5523 |
+
"epoch": 2.460093896713615,
|
5524 |
+
"grad_norm": NaN,
|
5525 |
+
"learning_rate": 7.831665563941864e-06,
|
5526 |
+
"loss": 0.0,
|
5527 |
+
"step": 786
|
5528 |
+
},
|
5529 |
+
{
|
5530 |
+
"epoch": 2.463223787167449,
|
5531 |
+
"grad_norm": NaN,
|
5532 |
+
"learning_rate": 7.742769010711326e-06,
|
5533 |
+
"loss": 0.0,
|
5534 |
+
"step": 787
|
5535 |
+
},
|
5536 |
+
{
|
5537 |
+
"epoch": 2.4663536776212833,
|
5538 |
+
"grad_norm": NaN,
|
5539 |
+
"learning_rate": 7.65433750831151e-06,
|
5540 |
+
"loss": 0.0,
|
5541 |
+
"step": 788
|
5542 |
+
},
|
5543 |
+
{
|
5544 |
+
"epoch": 2.4694835680751175,
|
5545 |
+
"grad_norm": NaN,
|
5546 |
+
"learning_rate": 7.566372029952057e-06,
|
5547 |
+
"loss": 0.0,
|
5548 |
+
"step": 789
|
5549 |
+
},
|
5550 |
+
{
|
5551 |
+
"epoch": 2.4726134585289516,
|
5552 |
+
"grad_norm": NaN,
|
5553 |
+
"learning_rate": 7.47887354371386e-06,
|
5554 |
+
"loss": 0.0,
|
5555 |
+
"step": 790
|
5556 |
+
},
|
5557 |
+
{
|
5558 |
+
"epoch": 2.4757433489827854,
|
5559 |
+
"grad_norm": NaN,
|
5560 |
+
"learning_rate": 7.391843012538502e-06,
|
5561 |
+
"loss": 0.0,
|
5562 |
+
"step": 791
|
5563 |
+
},
|
5564 |
+
{
|
5565 |
+
"epoch": 2.4788732394366195,
|
5566 |
+
"grad_norm": NaN,
|
5567 |
+
"learning_rate": 7.3052813942175615e-06,
|
5568 |
+
"loss": 0.0,
|
5569 |
+
"step": 792
|
5570 |
+
},
|
5571 |
+
{
|
5572 |
+
"epoch": 2.4820031298904537,
|
5573 |
+
"grad_norm": NaN,
|
5574 |
+
"learning_rate": 7.219189641382129e-06,
|
5575 |
+
"loss": 0.0,
|
5576 |
+
"step": 793
|
5577 |
+
},
|
5578 |
+
{
|
5579 |
+
"epoch": 2.485133020344288,
|
5580 |
+
"grad_norm": NaN,
|
5581 |
+
"learning_rate": 7.1335687014923545e-06,
|
5582 |
+
"loss": 0.0,
|
5583 |
+
"step": 794
|
5584 |
+
},
|
5585 |
+
{
|
5586 |
+
"epoch": 2.488262910798122,
|
5587 |
+
"grad_norm": NaN,
|
5588 |
+
"learning_rate": 7.048419516826932e-06,
|
5589 |
+
"loss": 0.0,
|
5590 |
+
"step": 795
|
5591 |
+
},
|
5592 |
+
{
|
5593 |
+
"epoch": 2.491392801251956,
|
5594 |
+
"grad_norm": NaN,
|
5595 |
+
"learning_rate": 6.963743024472791e-06,
|
5596 |
+
"loss": 0.0,
|
5597 |
+
"step": 796
|
5598 |
+
},
|
5599 |
+
{
|
5600 |
+
"epoch": 2.4945226917057903,
|
5601 |
+
"grad_norm": NaN,
|
5602 |
+
"learning_rate": 6.879540156314801e-06,
|
5603 |
+
"loss": 0.0,
|
5604 |
+
"step": 797
|
5605 |
+
},
|
5606 |
+
{
|
5607 |
+
"epoch": 2.4976525821596245,
|
5608 |
+
"grad_norm": NaN,
|
5609 |
+
"learning_rate": 6.79581183902544e-06,
|
5610 |
+
"loss": 0.0,
|
5611 |
+
"step": 798
|
5612 |
+
},
|
5613 |
+
{
|
5614 |
+
"epoch": 2.5007824726134587,
|
5615 |
+
"grad_norm": NaN,
|
5616 |
+
"learning_rate": 6.7125589940546514e-06,
|
5617 |
+
"loss": 0.0,
|
5618 |
+
"step": 799
|
5619 |
+
},
|
5620 |
+
{
|
5621 |
+
"epoch": 2.5039123630672924,
|
5622 |
+
"grad_norm": NaN,
|
5623 |
+
"learning_rate": 6.6297825376197275e-06,
|
5624 |
+
"loss": 0.0,
|
5625 |
+
"step": 800
|
5626 |
+
},
|
5627 |
+
{
|
5628 |
+
"epoch": 2.507042253521127,
|
5629 |
+
"grad_norm": NaN,
|
5630 |
+
"learning_rate": 6.547483380695146e-06,
|
5631 |
+
"loss": 0.0,
|
5632 |
+
"step": 801
|
5633 |
+
},
|
5634 |
+
{
|
5635 |
+
"epoch": 2.5101721439749607,
|
5636 |
+
"grad_norm": NaN,
|
5637 |
+
"learning_rate": 6.465662429002633e-06,
|
5638 |
+
"loss": 0.0,
|
5639 |
+
"step": 802
|
5640 |
+
},
|
5641 |
+
{
|
5642 |
+
"epoch": 2.513302034428795,
|
5643 |
+
"grad_norm": NaN,
|
5644 |
+
"learning_rate": 6.384320583001113e-06,
|
5645 |
+
"loss": 0.0,
|
5646 |
+
"step": 803
|
5647 |
+
},
|
5648 |
+
{
|
5649 |
+
"epoch": 2.516431924882629,
|
5650 |
+
"grad_norm": NaN,
|
5651 |
+
"learning_rate": 6.303458737876883e-06,
|
5652 |
+
"loss": 0.0,
|
5653 |
+
"step": 804
|
5654 |
+
},
|
5655 |
+
{
|
5656 |
+
"epoch": 2.519561815336463,
|
5657 |
+
"grad_norm": NaN,
|
5658 |
+
"learning_rate": 6.223077783533649e-06,
|
5659 |
+
"loss": 0.0,
|
5660 |
+
"step": 805
|
5661 |
+
},
|
5662 |
+
{
|
5663 |
+
"epoch": 2.5226917057902973,
|
5664 |
+
"grad_norm": NaN,
|
5665 |
+
"learning_rate": 6.143178604582883e-06,
|
5666 |
+
"loss": 0.0,
|
5667 |
+
"step": 806
|
5668 |
+
},
|
5669 |
+
{
|
5670 |
+
"epoch": 2.5258215962441315,
|
5671 |
+
"grad_norm": NaN,
|
5672 |
+
"learning_rate": 6.063762080333934e-06,
|
5673 |
+
"loss": 0.0,
|
5674 |
+
"step": 807
|
5675 |
+
},
|
5676 |
+
{
|
5677 |
+
"epoch": 2.5289514866979657,
|
5678 |
+
"grad_norm": NaN,
|
5679 |
+
"learning_rate": 5.984829084784477e-06,
|
5680 |
+
"loss": 0.0,
|
5681 |
+
"step": 808
|
5682 |
+
},
|
5683 |
+
{
|
5684 |
+
"epoch": 2.5320813771518,
|
5685 |
+
"grad_norm": NaN,
|
5686 |
+
"learning_rate": 5.906380486610791e-06,
|
5687 |
+
"loss": 0.0,
|
5688 |
+
"step": 809
|
5689 |
+
},
|
5690 |
+
{
|
5691 |
+
"epoch": 2.535211267605634,
|
5692 |
+
"grad_norm": NaN,
|
5693 |
+
"learning_rate": 5.828417149158305e-06,
|
5694 |
+
"loss": 0.0,
|
5695 |
+
"step": 810
|
5696 |
+
},
|
5697 |
+
{
|
5698 |
+
"epoch": 2.5383411580594677,
|
5699 |
+
"grad_norm": NaN,
|
5700 |
+
"learning_rate": 5.750939930431959e-06,
|
5701 |
+
"loss": 0.0,
|
5702 |
+
"step": 811
|
5703 |
+
},
|
5704 |
+
{
|
5705 |
+
"epoch": 2.5414710485133023,
|
5706 |
+
"grad_norm": NaN,
|
5707 |
+
"learning_rate": 5.673949683086921e-06,
|
5708 |
+
"loss": 0.0,
|
5709 |
+
"step": 812
|
5710 |
+
},
|
5711 |
+
{
|
5712 |
+
"epoch": 2.544600938967136,
|
5713 |
+
"grad_norm": NaN,
|
5714 |
+
"learning_rate": 5.597447254419064e-06,
|
5715 |
+
"loss": 0.0,
|
5716 |
+
"step": 813
|
5717 |
+
},
|
5718 |
+
{
|
5719 |
+
"epoch": 2.54773082942097,
|
5720 |
+
"grad_norm": NaN,
|
5721 |
+
"learning_rate": 5.521433486355748e-06,
|
5722 |
+
"loss": 0.0,
|
5723 |
+
"step": 814
|
5724 |
+
},
|
5725 |
+
{
|
5726 |
+
"epoch": 2.5508607198748043,
|
5727 |
+
"grad_norm": NaN,
|
5728 |
+
"learning_rate": 5.445909215446471e-06,
|
5729 |
+
"loss": 0.0,
|
5730 |
+
"step": 815
|
5731 |
+
},
|
5732 |
+
{
|
5733 |
+
"epoch": 2.5539906103286385,
|
5734 |
+
"grad_norm": NaN,
|
5735 |
+
"learning_rate": 5.370875272853737e-06,
|
5736 |
+
"loss": 0.0,
|
5737 |
+
"step": 816
|
5738 |
+
},
|
5739 |
+
{
|
5740 |
+
"epoch": 2.5571205007824727,
|
5741 |
+
"grad_norm": NaN,
|
5742 |
+
"learning_rate": 5.296332484343813e-06,
|
5743 |
+
"loss": 0.0,
|
5744 |
+
"step": 817
|
5745 |
+
},
|
5746 |
+
{
|
5747 |
+
"epoch": 2.560250391236307,
|
5748 |
+
"grad_norm": NaN,
|
5749 |
+
"learning_rate": 5.222281670277756e-06,
|
5750 |
+
"loss": 0.0,
|
5751 |
+
"step": 818
|
5752 |
+
},
|
5753 |
+
{
|
5754 |
+
"epoch": 2.563380281690141,
|
5755 |
+
"grad_norm": NaN,
|
5756 |
+
"learning_rate": 5.148723645602288e-06,
|
5757 |
+
"loss": 0.0,
|
5758 |
+
"step": 819
|
5759 |
+
},
|
5760 |
+
{
|
5761 |
+
"epoch": 2.5665101721439747,
|
5762 |
+
"grad_norm": NaN,
|
5763 |
+
"learning_rate": 5.075659219840911e-06,
|
5764 |
+
"loss": 0.0,
|
5765 |
+
"step": 820
|
5766 |
+
},
|
5767 |
+
{
|
5768 |
+
"epoch": 2.5696400625978093,
|
5769 |
+
"grad_norm": NaN,
|
5770 |
+
"learning_rate": 5.003089197084898e-06,
|
5771 |
+
"loss": 0.0,
|
5772 |
+
"step": 821
|
5773 |
+
},
|
5774 |
+
{
|
5775 |
+
"epoch": 2.572769953051643,
|
5776 |
+
"grad_norm": NaN,
|
5777 |
+
"learning_rate": 4.93101437598455e-06,
|
5778 |
+
"loss": 0.0,
|
5779 |
+
"step": 822
|
5780 |
+
},
|
5781 |
+
{
|
5782 |
+
"epoch": 2.575899843505477,
|
5783 |
+
"grad_norm": NaN,
|
5784 |
+
"learning_rate": 4.859435549740332e-06,
|
5785 |
+
"loss": 0.0,
|
5786 |
+
"step": 823
|
5787 |
+
},
|
5788 |
+
{
|
5789 |
+
"epoch": 2.5790297339593113,
|
5790 |
+
"grad_norm": NaN,
|
5791 |
+
"learning_rate": 4.7883535060941565e-06,
|
5792 |
+
"loss": 0.0,
|
5793 |
+
"step": 824
|
5794 |
+
},
|
5795 |
+
{
|
5796 |
+
"epoch": 2.5821596244131455,
|
5797 |
+
"grad_norm": NaN,
|
5798 |
+
"learning_rate": 4.717769027320762e-06,
|
5799 |
+
"loss": 0.0,
|
5800 |
+
"step": 825
|
5801 |
+
},
|
5802 |
+
{
|
5803 |
+
"epoch": 2.5852895148669797,
|
5804 |
+
"grad_norm": NaN,
|
5805 |
+
"learning_rate": 4.647682890219041e-06,
|
5806 |
+
"loss": 0.0,
|
5807 |
+
"step": 826
|
5808 |
+
},
|
5809 |
+
{
|
5810 |
+
"epoch": 2.588419405320814,
|
5811 |
+
"grad_norm": NaN,
|
5812 |
+
"learning_rate": 4.57809586610351e-06,
|
5813 |
+
"loss": 0.0,
|
5814 |
+
"step": 827
|
5815 |
+
},
|
5816 |
+
{
|
5817 |
+
"epoch": 2.591549295774648,
|
5818 |
+
"grad_norm": NaN,
|
5819 |
+
"learning_rate": 4.50900872079586e-06,
|
5820 |
+
"loss": 0.0,
|
5821 |
+
"step": 828
|
5822 |
+
},
|
5823 |
+
{
|
5824 |
+
"epoch": 2.594679186228482,
|
5825 |
+
"grad_norm": NaN,
|
5826 |
+
"learning_rate": 4.44042221461648e-06,
|
5827 |
+
"loss": 0.0,
|
5828 |
+
"step": 829
|
5829 |
+
},
|
5830 |
+
{
|
5831 |
+
"epoch": 2.5978090766823163,
|
5832 |
+
"grad_norm": NaN,
|
5833 |
+
"learning_rate": 4.372337102376089e-06,
|
5834 |
+
"loss": 0.0,
|
5835 |
+
"step": 830
|
5836 |
+
},
|
5837 |
+
{
|
5838 |
+
"epoch": 2.60093896713615,
|
5839 |
+
"grad_norm": NaN,
|
5840 |
+
"learning_rate": 4.304754133367489e-06,
|
5841 |
+
"loss": 0.0,
|
5842 |
+
"step": 831
|
5843 |
+
},
|
5844 |
+
{
|
5845 |
+
"epoch": 2.604068857589984,
|
5846 |
+
"grad_norm": NaN,
|
5847 |
+
"learning_rate": 4.237674051357232e-06,
|
5848 |
+
"loss": 0.0,
|
5849 |
+
"step": 832
|
5850 |
+
},
|
5851 |
+
{
|
5852 |
+
"epoch": 2.6071987480438183,
|
5853 |
+
"grad_norm": NaN,
|
5854 |
+
"learning_rate": 4.1710975945775224e-06,
|
5855 |
+
"loss": 0.0,
|
5856 |
+
"step": 833
|
5857 |
+
},
|
5858 |
+
{
|
5859 |
+
"epoch": 2.6103286384976525,
|
5860 |
+
"grad_norm": NaN,
|
5861 |
+
"learning_rate": 4.1050254957180065e-06,
|
5862 |
+
"loss": 0.0,
|
5863 |
+
"step": 834
|
5864 |
+
},
|
5865 |
+
{
|
5866 |
+
"epoch": 2.6134585289514867,
|
5867 |
+
"grad_norm": NaN,
|
5868 |
+
"learning_rate": 4.039458481917796e-06,
|
5869 |
+
"loss": 0.0,
|
5870 |
+
"step": 835
|
5871 |
+
},
|
5872 |
+
{
|
5873 |
+
"epoch": 2.616588419405321,
|
5874 |
+
"grad_norm": NaN,
|
5875 |
+
"learning_rate": 3.974397274757374e-06,
|
5876 |
+
"loss": 0.0,
|
5877 |
+
"step": 836
|
5878 |
+
},
|
5879 |
+
{
|
5880 |
+
"epoch": 2.619718309859155,
|
5881 |
+
"grad_norm": NaN,
|
5882 |
+
"learning_rate": 3.9098425902507505e-06,
|
5883 |
+
"loss": 0.0,
|
5884 |
+
"step": 837
|
5885 |
+
},
|
5886 |
+
{
|
5887 |
+
"epoch": 2.622848200312989,
|
5888 |
+
"grad_norm": NaN,
|
5889 |
+
"learning_rate": 3.845795138837488e-06,
|
5890 |
+
"loss": 0.0,
|
5891 |
+
"step": 838
|
5892 |
+
},
|
5893 |
+
{
|
5894 |
+
"epoch": 2.6259780907668233,
|
5895 |
+
"grad_norm": NaN,
|
5896 |
+
"learning_rate": 3.782255625374975e-06,
|
5897 |
+
"loss": 0.0,
|
5898 |
+
"step": 839
|
5899 |
+
},
|
5900 |
+
{
|
5901 |
+
"epoch": 2.629107981220657,
|
5902 |
+
"grad_norm": NaN,
|
5903 |
+
"learning_rate": 3.719224749130579e-06,
|
5904 |
+
"loss": 0.0,
|
5905 |
+
"step": 840
|
5906 |
+
},
|
5907 |
+
{
|
5908 |
+
"epoch": 2.6322378716744916,
|
5909 |
+
"grad_norm": NaN,
|
5910 |
+
"learning_rate": 3.6567032037740424e-06,
|
5911 |
+
"loss": 0.0,
|
5912 |
+
"step": 841
|
5913 |
+
},
|
5914 |
+
{
|
5915 |
+
"epoch": 2.6353677621283254,
|
5916 |
+
"grad_norm": NaN,
|
5917 |
+
"learning_rate": 3.594691677369738e-06,
|
5918 |
+
"loss": 0.0,
|
5919 |
+
"step": 842
|
5920 |
+
},
|
5921 |
+
{
|
5922 |
+
"epoch": 2.6384976525821595,
|
5923 |
+
"grad_norm": NaN,
|
5924 |
+
"learning_rate": 3.533190852369217e-06,
|
5925 |
+
"loss": 0.0,
|
5926 |
+
"step": 843
|
5927 |
+
},
|
5928 |
+
{
|
5929 |
+
"epoch": 2.6416275430359937,
|
5930 |
+
"grad_norm": NaN,
|
5931 |
+
"learning_rate": 3.4722014056036036e-06,
|
5932 |
+
"loss": 0.0,
|
5933 |
+
"step": 844
|
5934 |
+
},
|
5935 |
+
{
|
5936 |
+
"epoch": 2.644757433489828,
|
5937 |
+
"grad_norm": NaN,
|
5938 |
+
"learning_rate": 3.4117240082762125e-06,
|
5939 |
+
"loss": 0.0,
|
5940 |
+
"step": 845
|
5941 |
+
},
|
5942 |
+
{
|
5943 |
+
"epoch": 2.647887323943662,
|
5944 |
+
"grad_norm": NaN,
|
5945 |
+
"learning_rate": 3.3517593259551004e-06,
|
5946 |
+
"loss": 0.0,
|
5947 |
+
"step": 846
|
5948 |
+
},
|
5949 |
+
{
|
5950 |
+
"epoch": 2.651017214397496,
|
5951 |
+
"grad_norm": NaN,
|
5952 |
+
"learning_rate": 3.292308018565815e-06,
|
5953 |
+
"loss": 0.0,
|
5954 |
+
"step": 847
|
5955 |
+
},
|
5956 |
+
{
|
5957 |
+
"epoch": 2.6541471048513303,
|
5958 |
+
"grad_norm": NaN,
|
5959 |
+
"learning_rate": 3.233370740384056e-06,
|
5960 |
+
"loss": 0.0,
|
5961 |
+
"step": 848
|
5962 |
+
},
|
5963 |
+
{
|
5964 |
+
"epoch": 2.6572769953051645,
|
5965 |
+
"grad_norm": NaN,
|
5966 |
+
"learning_rate": 3.1749481400285162e-06,
|
5967 |
+
"loss": 0.0,
|
5968 |
+
"step": 849
|
5969 |
+
},
|
5970 |
+
{
|
5971 |
+
"epoch": 2.6604068857589986,
|
5972 |
+
"grad_norm": NaN,
|
5973 |
+
"learning_rate": 3.117040860453746e-06,
|
5974 |
+
"loss": 0.0,
|
5975 |
+
"step": 850
|
5976 |
+
},
|
5977 |
+
{
|
5978 |
+
"epoch": 2.6635367762128324,
|
5979 |
+
"grad_norm": NaN,
|
5980 |
+
"learning_rate": 3.059649538943077e-06,
|
5981 |
+
"loss": 0.0,
|
5982 |
+
"step": 851
|
5983 |
+
},
|
5984 |
+
{
|
5985 |
+
"epoch": 2.6666666666666665,
|
5986 |
+
"grad_norm": NaN,
|
5987 |
+
"learning_rate": 3.0027748071015716e-06,
|
5988 |
+
"loss": 0.0,
|
5989 |
+
"step": 852
|
5990 |
+
},
|
5991 |
+
{
|
5992 |
+
"epoch": 2.6697965571205007,
|
5993 |
+
"grad_norm": NaN,
|
5994 |
+
"learning_rate": 2.946417290849135e-06,
|
5995 |
+
"loss": 0.0,
|
5996 |
+
"step": 853
|
5997 |
+
},
|
5998 |
+
{
|
5999 |
+
"epoch": 2.672926447574335,
|
6000 |
+
"grad_norm": NaN,
|
6001 |
+
"learning_rate": 2.8905776104135696e-06,
|
6002 |
+
"loss": 0.0,
|
6003 |
+
"step": 854
|
6004 |
+
},
|
6005 |
+
{
|
6006 |
+
"epoch": 2.676056338028169,
|
6007 |
+
"grad_norm": NaN,
|
6008 |
+
"learning_rate": 2.83525638032377e-06,
|
6009 |
+
"loss": 0.0,
|
6010 |
+
"step": 855
|
6011 |
+
},
|
6012 |
+
{
|
6013 |
+
"epoch": 2.679186228482003,
|
6014 |
+
"grad_norm": NaN,
|
6015 |
+
"learning_rate": 2.7804542094029663e-06,
|
6016 |
+
"loss": 0.0,
|
6017 |
+
"step": 856
|
6018 |
+
    },
+    {
+      "epoch": 2.6823161189358373,
+      "grad_norm": NaN,
+      "learning_rate": 2.7261717007620426e-06,
+      "loss": 0.0,
+      "step": 857
+    },
+    {
+      "epoch": 2.6854460093896715,
+      "grad_norm": NaN,
+      "learning_rate": 2.67240945179284e-06,
+      "loss": 0.0,
+      "step": 858
+    },
+    {
+      "epoch": 2.6885758998435056,
+      "grad_norm": NaN,
+      "learning_rate": 2.619168054161647e-06,
+      "loss": 0.0,
+      "step": 859
+    },
+    {
+      "epoch": 2.6917057902973394,
+      "grad_norm": NaN,
+      "learning_rate": 2.5664480938026425e-06,
+      "loss": 0.0,
+      "step": 860
+    },
+    {
+      "epoch": 2.694835680751174,
+      "grad_norm": NaN,
+      "learning_rate": 2.514250150911468e-06,
+      "loss": 0.0,
+      "step": 861
+    },
+    {
+      "epoch": 2.6979655712050077,
+      "grad_norm": NaN,
+      "learning_rate": 2.4625747999388493e-06,
+      "loss": 0.0,
+      "step": 862
+    },
+    {
+      "epoch": 2.701095461658842,
+      "grad_norm": NaN,
+      "learning_rate": 2.411422609584246e-06,
+      "loss": 0.0,
+      "step": 863
+    },
+    {
+      "epoch": 2.704225352112676,
+      "grad_norm": NaN,
+      "learning_rate": 2.360794142789635e-06,
+      "loss": 0.0,
+      "step": 864
+    },
+    {
+      "epoch": 2.70735524256651,
+      "grad_norm": NaN,
+      "learning_rate": 2.31068995673327e-06,
+      "loss": 0.0,
+      "step": 865
+    },
+    {
+      "epoch": 2.7104851330203443,
+      "grad_norm": NaN,
+      "learning_rate": 2.261110602823591e-06,
+      "loss": 0.0,
+      "step": 866
+    },
+    {
+      "epoch": 2.7136150234741785,
+      "grad_norm": NaN,
+      "learning_rate": 2.212056626693115e-06,
+      "loss": 0.0,
+      "step": 867
+    },
+    {
+      "epoch": 2.7167449139280127,
+      "grad_norm": NaN,
+      "learning_rate": 2.1635285681924632e-06,
+      "loss": 0.0,
+      "step": 868
+    },
+    {
+      "epoch": 2.7198748043818464,
+      "grad_norm": NaN,
+      "learning_rate": 2.1155269613844066e-06,
+      "loss": 0.0,
+      "step": 869
+    },
+    {
+      "epoch": 2.723004694835681,
+      "grad_norm": NaN,
+      "learning_rate": 2.0680523345379953e-06,
+      "loss": 0.0,
+      "step": 870
+    },
+    {
+      "epoch": 2.7261345852895147,
+      "grad_norm": NaN,
+      "learning_rate": 2.0211052101227313e-06,
+      "loss": 0.0,
+      "step": 871
+    },
+    {
+      "epoch": 2.729264475743349,
+      "grad_norm": NaN,
+      "learning_rate": 1.974686104802842e-06,
+      "loss": 0.0,
+      "step": 872
+    },
+    {
+      "epoch": 2.732394366197183,
+      "grad_norm": NaN,
+      "learning_rate": 1.9287955294315652e-06,
+      "loss": 0.0,
+      "step": 873
+    },
+    {
+      "epoch": 2.735524256651017,
+      "grad_norm": NaN,
+      "learning_rate": 1.8834339890455466e-06,
+      "loss": 0.0,
+      "step": 874
+    },
+    {
+      "epoch": 2.7386541471048513,
+      "grad_norm": NaN,
+      "learning_rate": 1.8386019828592726e-06,
+      "loss": 0.0,
+      "step": 875
+    },
+    {
+      "epoch": 2.7417840375586855,
+      "grad_norm": NaN,
+      "learning_rate": 1.7943000042595914e-06,
+      "loss": 0.0,
+      "step": 876
+    },
+    {
+      "epoch": 2.7449139280125197,
+      "grad_norm": NaN,
+      "learning_rate": 1.7505285408002615e-06,
+      "loss": 0.0,
+      "step": 877
+    },
+    {
+      "epoch": 2.748043818466354,
+      "grad_norm": NaN,
+      "learning_rate": 1.7072880741966002e-06,
+      "loss": 0.0,
+      "step": 878
+    },
+    {
+      "epoch": 2.751173708920188,
+      "grad_norm": NaN,
+      "learning_rate": 1.6645790803201889e-06,
+      "loss": 0.0,
+      "step": 879
+    },
+    {
+      "epoch": 2.7543035993740217,
+      "grad_norm": NaN,
+      "learning_rate": 1.6224020291935982e-06,
+      "loss": 0.0,
+      "step": 880
+    },
+    {
+      "epoch": 2.7574334898278563,
+      "grad_norm": NaN,
+      "learning_rate": 1.5807573849852653e-06,
+      "loss": 0.0,
+      "step": 881
+    },
+    {
+      "epoch": 2.76056338028169,
+      "grad_norm": NaN,
+      "learning_rate": 1.5396456060043695e-06,
+      "loss": 0.0,
+      "step": 882
+    },
+    {
+      "epoch": 2.763693270735524,
+      "grad_norm": NaN,
+      "learning_rate": 1.4990671446957594e-06,
+      "loss": 0.0,
+      "step": 883
+    },
+    {
+      "epoch": 2.7668231611893583,
+      "grad_norm": NaN,
+      "learning_rate": 1.4590224476350167e-06,
+      "loss": 0.0,
+      "step": 884
+    },
+    {
+      "epoch": 2.7699530516431925,
+      "grad_norm": NaN,
+      "learning_rate": 1.4195119555235114e-06,
+      "loss": 0.0,
+      "step": 885
+    },
+    {
+      "epoch": 2.7730829420970267,
+      "grad_norm": NaN,
+      "learning_rate": 1.3805361031835552e-06,
+      "loss": 0.0,
+      "step": 886
+    },
+    {
+      "epoch": 2.776212832550861,
+      "grad_norm": NaN,
+      "learning_rate": 1.3420953195536323e-06,
+      "loss": 0.0,
+      "step": 887
+    },
+    {
+      "epoch": 2.779342723004695,
+      "grad_norm": NaN,
+      "learning_rate": 1.3041900276836715e-06,
+      "loss": 0.0,
+      "step": 888
+    },
+    {
+      "epoch": 2.7824726134585287,
+      "grad_norm": NaN,
+      "learning_rate": 1.2668206447303766e-06,
+      "loss": 0.0,
+      "step": 889
+    },
+    {
+      "epoch": 2.7856025039123633,
+      "grad_norm": NaN,
+      "learning_rate": 1.2299875819526629e-06,
+      "loss": 0.0,
+      "step": 890
+    },
+    {
+      "epoch": 2.788732394366197,
+      "grad_norm": NaN,
+      "learning_rate": 1.1936912447070903e-06,
+      "loss": 0.0,
+      "step": 891
+    },
+    {
+      "epoch": 2.791862284820031,
+      "grad_norm": NaN,
+      "learning_rate": 1.157932032443465e-06,
+      "loss": 0.0,
+      "step": 892
+    },
+    {
+      "epoch": 2.7949921752738653,
+      "grad_norm": NaN,
+      "learning_rate": 1.1227103387003667e-06,
+      "loss": 0.0,
+      "step": 893
+    },
+    {
+      "epoch": 2.7981220657276995,
+      "grad_norm": NaN,
+      "learning_rate": 1.0880265511008736e-06,
+      "loss": 0.0,
+      "step": 894
+    },
+    {
+      "epoch": 2.8012519561815337,
+      "grad_norm": NaN,
+      "learning_rate": 1.0538810513482933e-06,
+      "loss": 0.0,
+      "step": 895
+    },
+    {
+      "epoch": 2.804381846635368,
+      "grad_norm": NaN,
+      "learning_rate": 1.0202742152219114e-06,
+      "loss": 0.0,
+      "step": 896
+    },
+    {
+      "epoch": 2.807511737089202,
+      "grad_norm": NaN,
+      "learning_rate": 9.872064125729386e-07,
+      "loss": 0.0,
+      "step": 897
+    },
+    {
+      "epoch": 2.810641627543036,
+      "grad_norm": NaN,
+      "learning_rate": 9.546780073203588e-07,
+      "loss": 0.0,
+      "step": 898
+    },
+    {
+      "epoch": 2.8137715179968703,
+      "grad_norm": NaN,
+      "learning_rate": 9.226893574469708e-07,
+      "loss": 0.0,
+      "step": 899
+    },
+    {
+      "epoch": 2.816901408450704,
+      "grad_norm": NaN,
+      "learning_rate": 8.912408149954421e-07,
+      "loss": 0.0,
+      "step": 900
+    },
+    {
+      "epoch": 2.820031298904538,
+      "grad_norm": NaN,
+      "learning_rate": 8.603327260644278e-07,
+      "loss": 0.0,
+      "step": 901
+    },
+    {
+      "epoch": 2.8231611893583723,
+      "grad_norm": NaN,
+      "learning_rate": 8.299654308047633e-07,
+      "loss": 0.0,
+      "step": 902
+    },
+    {
+      "epoch": 2.8262910798122065,
+      "grad_norm": NaN,
+      "learning_rate": 8.001392634157223e-07,
+      "loss": 0.0,
+      "step": 903
+    },
+    {
+      "epoch": 2.8294209702660407,
+      "grad_norm": NaN,
+      "learning_rate": 7.708545521413424e-07,
+      "loss": 0.0,
+      "step": 904
+    },
+    {
+      "epoch": 2.832550860719875,
+      "grad_norm": NaN,
+      "learning_rate": 7.421116192667943e-07,
+      "loss": 0.0,
+      "step": 905
+    },
+    {
+      "epoch": 2.835680751173709,
+      "grad_norm": NaN,
+      "learning_rate": 7.139107811148626e-07,
+      "loss": 0.0,
+      "step": 906
+    },
+    {
+      "epoch": 2.838810641627543,
+      "grad_norm": NaN,
+      "learning_rate": 6.862523480424488e-07,
+      "loss": 0.0,
+      "step": 907
+    },
+    {
+      "epoch": 2.8419405320813773,
+      "grad_norm": NaN,
+      "learning_rate": 6.591366244371566e-07,
+      "loss": 0.0,
+      "step": 908
+    },
+    {
+      "epoch": 2.845070422535211,
+      "grad_norm": NaN,
+      "learning_rate": 6.325639087139401e-07,
+      "loss": 0.0,
+      "step": 909
+    },
+    {
+      "epoch": 2.8482003129890456,
+      "grad_norm": NaN,
+      "learning_rate": 6.065344933118333e-07,
+      "loss": 0.0,
+      "step": 910
+    },
+    {
+      "epoch": 2.8513302034428794,
+      "grad_norm": NaN,
+      "learning_rate": 5.810486646907087e-07,
+      "loss": 0.0,
+      "step": 911
+    },
+    {
+      "epoch": 2.8544600938967135,
+      "grad_norm": NaN,
+      "learning_rate": 5.561067033281464e-07,
+      "loss": 0.0,
+      "step": 912
+    },
+    {
+      "epoch": 2.8575899843505477,
+      "grad_norm": NaN,
+      "learning_rate": 5.317088837163309e-07,
+      "loss": 0.0,
+      "step": 913
+    },
+    {
+      "epoch": 2.860719874804382,
+      "grad_norm": NaN,
+      "learning_rate": 5.07855474359048e-07,
+      "loss": 0.0,
+      "step": 914
+    },
+    {
+      "epoch": 2.863849765258216,
+      "grad_norm": NaN,
+      "learning_rate": 4.845467377687152e-07,
+      "loss": 0.0,
+      "step": 915
+    },
+    {
+      "epoch": 2.86697965571205,
+      "grad_norm": NaN,
+      "learning_rate": 4.617829304634891e-07,
+      "loss": 0.0,
+      "step": 916
+    },
+    {
+      "epoch": 2.8701095461658843,
+      "grad_norm": NaN,
+      "learning_rate": 4.395643029644736e-07,
+      "loss": 0.0,
+      "step": 917
+    },
+    {
+      "epoch": 2.873239436619718,
+      "grad_norm": NaN,
+      "learning_rate": 4.1789109979291083e-07,
+      "loss": 0.0,
+      "step": 918
+    },
+    {
+      "epoch": 2.8763693270735526,
+      "grad_norm": NaN,
+      "learning_rate": 3.967635594675334e-07,
+      "loss": 0.0,
+      "step": 919
+    },
+    {
+      "epoch": 2.8794992175273864,
+      "grad_norm": NaN,
+      "learning_rate": 3.7618191450192185e-07,
+      "loss": 0.0,
+      "step": 920
+    },
+    {
+      "epoch": 2.8826291079812205,
+      "grad_norm": NaN,
+      "learning_rate": 3.561463914019625e-07,
+      "loss": 0.0,
+      "step": 921
+    },
+    {
+      "epoch": 2.8857589984350547,
+      "grad_norm": NaN,
+      "learning_rate": 3.3665721066332144e-07,
+      "loss": 0.0,
+      "step": 922
+    },
+    {
+      "epoch": 2.888888888888889,
+      "grad_norm": NaN,
+      "learning_rate": 3.177145867690523e-07,
+      "loss": 0.0,
+      "step": 923
+    },
+    {
+      "epoch": 2.892018779342723,
+      "grad_norm": NaN,
+      "learning_rate": 2.9931872818720895e-07,
+      "loss": 0.0,
+      "step": 924
+    },
+    {
+      "epoch": 2.895148669796557,
+      "grad_norm": NaN,
+      "learning_rate": 2.8146983736857536e-07,
+      "loss": 0.0,
+      "step": 925
+    },
+    {
+      "epoch": 2.8982785602503913,
+      "grad_norm": NaN,
+      "learning_rate": 2.6416811074441717e-07,
+      "loss": 0.0,
+      "step": 926
+    },
+    {
+      "epoch": 2.9014084507042255,
+      "grad_norm": NaN,
+      "learning_rate": 2.474137387243336e-07,
+      "loss": 0.0,
+      "step": 927
+    },
+    {
+      "epoch": 2.9045383411580596,
+      "grad_norm": NaN,
+      "learning_rate": 2.3120690569414793e-07,
+      "loss": 0.0,
+      "step": 928
+    },
+    {
+      "epoch": 2.9076682316118934,
+      "grad_norm": NaN,
+      "learning_rate": 2.1554779001390357e-07,
+      "loss": 0.0,
+      "step": 929
+    },
+    {
+      "epoch": 2.910798122065728,
+      "grad_norm": NaN,
+      "learning_rate": 2.0043656401587675e-07,
+      "loss": 0.0,
+      "step": 930
+    },
+    {
+      "epoch": 2.9139280125195617,
+      "grad_norm": NaN,
+      "learning_rate": 1.85873394002678e-07,
+      "loss": 0.0,
+      "step": 931
+    },
+    {
+      "epoch": 2.917057902973396,
+      "grad_norm": NaN,
+      "learning_rate": 1.7185844024544816e-07,
+      "loss": 0.0,
+      "step": 932
+    },
+    {
+      "epoch": 2.92018779342723,
+      "grad_norm": NaN,
+      "learning_rate": 1.5839185698206526e-07,
+      "loss": 0.0,
+      "step": 933
+    },
+    {
+      "epoch": 2.923317683881064,
+      "grad_norm": NaN,
+      "learning_rate": 1.4547379241547364e-07,
+      "loss": 0.0,
+      "step": 934
+    },
+    {
+      "epoch": 2.9264475743348983,
+      "grad_norm": NaN,
+      "learning_rate": 1.3310438871202424e-07,
+      "loss": 0.0,
+      "step": 935
+    },
+    {
+      "epoch": 2.9295774647887325,
+      "grad_norm": NaN,
+      "learning_rate": 1.212837819999424e-07,
+      "loss": 0.0,
+      "step": 936
+    },
+    {
+      "epoch": 2.9327073552425666,
+      "grad_norm": NaN,
+      "learning_rate": 1.1001210236779025e-07,
+      "loss": 0.0,
+      "step": 937
+    },
+    {
+      "epoch": 2.9358372456964004,
+      "grad_norm": NaN,
+      "learning_rate": 9.928947386308452e-08,
+      "loss": 0.0,
+      "step": 938
+    },
+    {
+      "epoch": 2.938967136150235,
+      "grad_norm": NaN,
+      "learning_rate": 8.911601449087537e-08,
+      "loss": 0.0,
+      "step": 939
+    },
+    {
+      "epoch": 2.9420970266040687,
+      "grad_norm": NaN,
+      "learning_rate": 7.949183621250856e-08,
+      "loss": 0.0,
+      "step": 940
+    },
+    {
+      "epoch": 2.945226917057903,
+      "grad_norm": NaN,
+      "learning_rate": 7.0417044944332e-08,
+      "loss": 0.0,
+      "step": 941
+    },
+    {
+      "epoch": 2.948356807511737,
+      "grad_norm": NaN,
+      "learning_rate": 6.189174055657442e-08,
+      "loss": 0.0,
+      "step": 942
+    },
+    {
+      "epoch": 2.951486697965571,
+      "grad_norm": NaN,
+      "learning_rate": 5.391601687222969e-08,
+      "loss": 0.0,
+      "step": 943
+    },
+    {
+      "epoch": 2.9546165884194053,
+      "grad_norm": NaN,
+      "learning_rate": 4.64899616660297e-08,
+      "loss": 0.0,
+      "step": 944
+    },
+    {
+      "epoch": 2.9577464788732395,
+      "grad_norm": NaN,
+      "learning_rate": 3.961365666346195e-08,
+      "loss": 0.0,
+      "step": 945
+    },
+    {
+      "epoch": 2.9608763693270737,
+      "grad_norm": NaN,
+      "learning_rate": 3.3287177539892414e-08,
+      "loss": 0.0,
+      "step": 946
+    },
+    {
+      "epoch": 2.964006259780908,
+      "grad_norm": NaN,
+      "learning_rate": 2.7510593919727323e-08,
+      "loss": 0.0,
+      "step": 947
+    },
+    {
+      "epoch": 2.967136150234742,
+      "grad_norm": NaN,
+      "learning_rate": 2.2283969375630488e-08,
+      "loss": 0.0,
+      "step": 948
+    },
+    {
+      "epoch": 2.9702660406885757,
+      "grad_norm": NaN,
+      "learning_rate": 1.7607361427846026e-08,
+      "loss": 0.0,
+      "step": 949
+    },
+    {
+      "epoch": 2.97339593114241,
+      "grad_norm": NaN,
+      "learning_rate": 1.3480821543554457e-08,
+      "loss": 0.0,
+      "step": 950
+    },
+    {
+      "epoch": 2.976525821596244,
+      "grad_norm": NaN,
+      "learning_rate": 9.904395136295374e-09,
+      "loss": 0.0,
+      "step": 951
+    },
+    {
+      "epoch": 2.979655712050078,
+      "grad_norm": NaN,
+      "learning_rate": 6.87812156549561e-09,
+      "loss": 0.0,
+      "step": 952
+    },
+    {
+      "epoch": 2.9827856025039123,
+      "grad_norm": NaN,
+      "learning_rate": 4.402034136008482e-09,
+      "loss": 0.0,
+      "step": 953
+    },
+    {
+      "epoch": 2.9859154929577465,
+      "grad_norm": NaN,
+      "learning_rate": 2.47616009776408e-09,
+      "loss": 0.0,
+      "step": 954
+    },
+    {
+      "epoch": 2.9890453834115807,
+      "grad_norm": NaN,
+      "learning_rate": 1.1005206454584027e-09,
+      "loss": 0.0,
+      "step": 955
+    },
+    {
+      "epoch": 2.992175273865415,
+      "grad_norm": NaN,
+      "learning_rate": 2.7513091833686334e-10,
+      "loss": 0.0,
+      "step": 956
+    },
+    {
+      "epoch": 2.995305164319249,
+      "grad_norm": NaN,
+      "learning_rate": 0.0,
+      "loss": 0.0,
+      "step": 957
    }
  ],
  "logging_steps": 1,
...
"should_evaluate": false,
|
6737 |
"should_log": false,
|
6738 |
"should_save": true,
|
6739 |
+
"should_training_stop": true
|
6740 |
},
|
6741 |
"attributes": {}
|
6742 |
}
|
6743 |
},
|
6744 |
+
"total_flos": 2.534775174117458e+17,
|
6745 |
"train_batch_size": 4,
|
6746 |
"trial_name": null,
|
6747 |
"trial_params": null
|
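
Every logged step in the range above (857-957) reports "grad_norm": NaN with a flat "loss": 0.0 while the learning rate decays to zero, and the run then sets "should_training_stop": true; a non-finite gradient norm alongside a hard-zero loss is typically a sign the run diverged or overflowed at some earlier step, so this checkpoint is worth auditing before resuming from it. A minimal sketch for flagging such steps (assumptions: the last-checkpoint/trainer_state.json path from this repository's layout, and Python's stdlib json, which parses the bare NaN literals shown above as float('nan')):

import json
import math

# Load the saved trainer state (path assumed from this repository's layout).
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "grad_norm"; eval entries do not, so guard with .get().
nan_steps = [
    entry["step"]
    for entry in state["log_history"]
    if isinstance(entry.get("grad_norm"), float) and math.isnan(entry["grad_norm"])
]

print(f"{len(nan_steps)} of {state['global_step']} logged steps report NaN grad_norm")
if nan_steps:
    print("first affected step:", nan_steps[0])

Run against this checkpoint, the sketch would flag at least steps 857 through 957; whether earlier steps were also affected is not visible in this diff.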