Training in progress, step 1200, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b6102e640df91591ae5e1421aa30e94079273b07415e80ce546a2f7411bc2177
 size 5752
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:29ffb823fe01091bcec6d0831d144d2b9711f6f425a7201330c381559963d3b9
 size 15814
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e00cb835fc366aa9f5489a07d0e068f8672dc83206d40c9854181d9d7a9a805c
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8b7384accc06cf321f008aa8494a569142e2fcc7cdc888583cfeddfa9f6e36fe
 size 1064
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": 11.5,
 "best_model_checkpoint": "miner_id_24/checkpoint-300",
-"epoch": 0.
 "eval_steps": 300,
-"global_step":
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -6339,6 +6339,2114 @@
 "eval_samples_per_second": 151.162,
 "eval_steps_per_second": 75.581,
 "step": 900
 }
 ],
 "logging_steps": 1,
@@ -6353,7 +8461,7 @@
 "early_stopping_threshold": 0.0
 },
 "attributes": {
-"early_stopping_patience_counter":
 }
 },
 "TrainerControl": {
@@ -6362,12 +8470,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
-"should_training_stop":
 },
 "attributes": {}
 }
 },
-"total_flos":
 "train_batch_size": 2,
 "trial_name": null,
 "trial_params": null
 {
 "best_metric": 11.5,
 "best_model_checkpoint": "miner_id_24/checkpoint-300",
+"epoch": 0.16387286197125397,
 "eval_steps": 300,
+"global_step": 1200,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
 "eval_samples_per_second": 151.162,
 "eval_steps_per_second": 75.581,
 "step": 900
|
6342 |
+
},
|
6343 |
+
{
|
6344 |
+
"epoch": 0.12304120719674985,
|
6345 |
+
"grad_norm": 0.0008265585056506097,
|
6346 |
+
"learning_rate": 0.00015928737427682032,
|
6347 |
+
"loss": 46.0,
|
6348 |
+
"step": 901
|
6349 |
+
},
|
6350 |
+
{
|
6351 |
+
"epoch": 0.12317776791505923,
|
6352 |
+
"grad_norm": 0.0003382295253686607,
|
6353 |
+
"learning_rate": 0.00015920272918543257,
|
6354 |
+
"loss": 46.0,
|
6355 |
+
"step": 902
|
6356 |
+
},
|
6357 |
+
{
|
6358 |
+
"epoch": 0.12331432863336861,
|
6359 |
+
"grad_norm": 0.00040213519241660833,
|
6360 |
+
"learning_rate": 0.00015911801873599933,
|
6361 |
+
"loss": 46.0,
|
6362 |
+
"step": 903
|
6363 |
+
},
|
6364 |
+
{
|
6365 |
+
"epoch": 0.12345088935167799,
|
6366 |
+
"grad_norm": 0.0013933501904830337,
|
6367 |
+
"learning_rate": 0.00015903324302203836,
|
6368 |
+
"loss": 46.0,
|
6369 |
+
"step": 904
|
6370 |
+
},
|
6371 |
+
{
|
6372 |
+
"epoch": 0.12358745006998736,
|
6373 |
+
"grad_norm": 0.0003189480921719223,
|
6374 |
+
"learning_rate": 0.00015894840213713952,
|
6375 |
+
"loss": 46.0,
|
6376 |
+
"step": 905
|
6377 |
+
},
|
6378 |
+
{
|
6379 |
+
"epoch": 0.12372401078829674,
|
6380 |
+
"grad_norm": 0.00038199976552277803,
|
6381 |
+
"learning_rate": 0.0001588634961749646,
|
6382 |
+
"loss": 46.0,
|
6383 |
+
"step": 906
|
6384 |
+
},
|
6385 |
+
{
|
6386 |
+
"epoch": 0.12386057150660612,
|
6387 |
+
"grad_norm": 0.0005732272402383387,
|
6388 |
+
"learning_rate": 0.00015877852522924732,
|
6389 |
+
"loss": 46.0,
|
6390 |
+
"step": 907
|
6391 |
+
},
|
6392 |
+
{
|
6393 |
+
"epoch": 0.1239971322249155,
|
6394 |
+
"grad_norm": 0.0006085040513426065,
|
6395 |
+
"learning_rate": 0.00015869348939379302,
|
6396 |
+
"loss": 46.0,
|
6397 |
+
"step": 908
|
6398 |
+
},
|
6399 |
+
{
|
6400 |
+
"epoch": 0.12413369294322488,
|
6401 |
+
"grad_norm": 0.0004498598864302039,
|
6402 |
+
"learning_rate": 0.0001586083887624787,
|
6403 |
+
"loss": 46.0,
|
6404 |
+
"step": 909
|
6405 |
+
},
|
6406 |
+
{
|
6407 |
+
"epoch": 0.12427025366153426,
|
6408 |
+
"grad_norm": 0.0005596345290541649,
|
6409 |
+
"learning_rate": 0.00015852322342925295,
|
6410 |
+
"loss": 46.0,
|
6411 |
+
"step": 910
|
6412 |
+
},
|
6413 |
+
{
|
6414 |
+
"epoch": 0.12440681437984363,
|
6415 |
+
"grad_norm": 0.0004599193634930998,
|
6416 |
+
"learning_rate": 0.00015843799348813574,
|
6417 |
+
"loss": 46.0,
|
6418 |
+
"step": 911
|
6419 |
+
},
|
6420 |
+
{
|
6421 |
+
"epoch": 0.12454337509815301,
|
6422 |
+
"grad_norm": 0.0006004975875839591,
|
6423 |
+
"learning_rate": 0.0001583526990332184,
|
6424 |
+
"loss": 46.0,
|
6425 |
+
"step": 912
|
6426 |
+
},
|
6427 |
+
{
|
6428 |
+
"epoch": 0.12467993581646239,
|
6429 |
+
"grad_norm": 0.000272926437901333,
|
6430 |
+
"learning_rate": 0.00015826734015866344,
|
6431 |
+
"loss": 46.0,
|
6432 |
+
"step": 913
|
6433 |
+
},
|
6434 |
+
{
|
6435 |
+
"epoch": 0.12481649653477177,
|
6436 |
+
"grad_norm": 0.00042817802750505507,
|
6437 |
+
"learning_rate": 0.00015818191695870452,
|
6438 |
+
"loss": 46.0,
|
6439 |
+
"step": 914
|
6440 |
+
},
|
6441 |
+
{
|
6442 |
+
"epoch": 0.12495305725308115,
|
6443 |
+
"grad_norm": 0.0005577219417318702,
|
6444 |
+
"learning_rate": 0.00015809642952764632,
|
6445 |
+
"loss": 46.0,
|
6446 |
+
"step": 915
|
6447 |
+
},
|
6448 |
+
{
|
6449 |
+
"epoch": 0.12508961797139054,
|
6450 |
+
"grad_norm": 0.0007422527414746583,
|
6451 |
+
"learning_rate": 0.00015801087795986438,
|
6452 |
+
"loss": 46.0,
|
6453 |
+
"step": 916
|
6454 |
+
},
|
6455 |
+
{
|
6456 |
+
"epoch": 0.1252261786896999,
|
6457 |
+
"grad_norm": 0.00034818475251086056,
|
6458 |
+
"learning_rate": 0.0001579252623498051,
|
6459 |
+
"loss": 46.0,
|
6460 |
+
"step": 917
|
6461 |
+
},
|
6462 |
+
{
|
6463 |
+
"epoch": 0.1253627394080093,
|
6464 |
+
"grad_norm": 0.00017885708075482398,
|
6465 |
+
"learning_rate": 0.0001578395827919855,
|
6466 |
+
"loss": 46.0,
|
6467 |
+
"step": 918
|
6468 |
+
},
|
6469 |
+
{
|
6470 |
+
"epoch": 0.12549930012631866,
|
6471 |
+
"grad_norm": 0.0005659409216605127,
|
6472 |
+
"learning_rate": 0.00015775383938099332,
|
6473 |
+
"loss": 46.0,
|
6474 |
+
"step": 919
|
6475 |
+
},
|
6476 |
+
{
|
6477 |
+
"epoch": 0.12563586084462805,
|
6478 |
+
"grad_norm": 0.0005314027075655758,
|
6479 |
+
"learning_rate": 0.00015766803221148673,
|
6480 |
+
"loss": 46.0,
|
6481 |
+
"step": 920
|
6482 |
+
},
|
6483 |
+
{
|
6484 |
+
"epoch": 0.12577242156293741,
|
6485 |
+
"grad_norm": 0.0008784609963186085,
|
6486 |
+
"learning_rate": 0.00015758216137819422,
|
6487 |
+
"loss": 46.0,
|
6488 |
+
"step": 921
|
6489 |
+
},
|
6490 |
+
{
|
6491 |
+
"epoch": 0.1259089822812468,
|
6492 |
+
"grad_norm": 0.00040658083162270486,
|
6493 |
+
"learning_rate": 0.0001574962269759147,
|
6494 |
+
"loss": 46.0,
|
6495 |
+
"step": 922
|
6496 |
+
},
|
6497 |
+
{
|
6498 |
+
"epoch": 0.12604554299955617,
|
6499 |
+
"grad_norm": 0.0002654801937751472,
|
6500 |
+
"learning_rate": 0.00015741022909951716,
|
6501 |
+
"loss": 46.0,
|
6502 |
+
"step": 923
|
6503 |
+
},
|
6504 |
+
{
|
6505 |
+
"epoch": 0.12618210371786556,
|
6506 |
+
"grad_norm": 0.0008352938457392156,
|
6507 |
+
"learning_rate": 0.00015732416784394065,
|
6508 |
+
"loss": 46.0,
|
6509 |
+
"step": 924
|
6510 |
+
},
|
6511 |
+
{
|
6512 |
+
"epoch": 0.12631866443617493,
|
6513 |
+
"grad_norm": 0.0007195891230367124,
|
6514 |
+
"learning_rate": 0.00015723804330419422,
|
6515 |
+
"loss": 46.0,
|
6516 |
+
"step": 925
|
6517 |
+
},
|
6518 |
+
{
|
6519 |
+
"epoch": 0.12645522515448432,
|
6520 |
+
"grad_norm": 0.00043209362775087357,
|
6521 |
+
"learning_rate": 0.00015715185557535689,
|
6522 |
+
"loss": 46.0,
|
6523 |
+
"step": 926
|
6524 |
+
},
|
6525 |
+
{
|
6526 |
+
"epoch": 0.12659178587279368,
|
6527 |
+
"grad_norm": 0.0005025159334763885,
|
6528 |
+
"learning_rate": 0.00015706560475257727,
|
6529 |
+
"loss": 46.0,
|
6530 |
+
"step": 927
|
6531 |
+
},
|
6532 |
+
{
|
6533 |
+
"epoch": 0.12672834659110307,
|
6534 |
+
"grad_norm": 0.00031747709726914763,
|
6535 |
+
"learning_rate": 0.00015697929093107365,
|
6536 |
+
"loss": 46.0,
|
6537 |
+
"step": 928
|
6538 |
+
},
|
6539 |
+
{
|
6540 |
+
"epoch": 0.12686490730941244,
|
6541 |
+
"grad_norm": 0.0004966561100445688,
|
6542 |
+
"learning_rate": 0.000156892914206134,
|
6543 |
+
"loss": 46.0,
|
6544 |
+
"step": 929
|
6545 |
+
},
|
6546 |
+
{
|
6547 |
+
"epoch": 0.12700146802772183,
|
6548 |
+
"grad_norm": 0.000545269635040313,
|
6549 |
+
"learning_rate": 0.00015680647467311557,
|
6550 |
+
"loss": 46.0,
|
6551 |
+
"step": 930
|
6552 |
+
},
|
6553 |
+
{
|
6554 |
+
"epoch": 0.1271380287460312,
|
6555 |
+
"grad_norm": 0.00019552423327695578,
|
6556 |
+
"learning_rate": 0.00015671997242744511,
|
6557 |
+
"loss": 46.0,
|
6558 |
+
"step": 931
|
6559 |
+
},
|
6560 |
+
{
|
6561 |
+
"epoch": 0.1272745894643406,
|
6562 |
+
"grad_norm": 0.0018716168124228716,
|
6563 |
+
"learning_rate": 0.00015663340756461844,
|
6564 |
+
"loss": 46.0,
|
6565 |
+
"step": 932
|
6566 |
+
},
|
6567 |
+
{
|
6568 |
+
"epoch": 0.12741115018264995,
|
6569 |
+
"grad_norm": 0.0008228892693296075,
|
6570 |
+
"learning_rate": 0.0001565467801802006,
|
6571 |
+
"loss": 46.0,
|
6572 |
+
"step": 933
|
6573 |
+
},
|
6574 |
+
{
|
6575 |
+
"epoch": 0.12754771090095934,
|
6576 |
+
"grad_norm": 0.0006504448829218745,
|
6577 |
+
"learning_rate": 0.00015646009036982567,
|
6578 |
+
"loss": 46.0,
|
6579 |
+
"step": 934
|
6580 |
+
},
|
6581 |
+
{
|
6582 |
+
"epoch": 0.1276842716192687,
|
6583 |
+
"grad_norm": 0.0009379376424476504,
|
6584 |
+
"learning_rate": 0.00015637333822919656,
|
6585 |
+
"loss": 46.0,
|
6586 |
+
"step": 935
|
6587 |
+
},
|
6588 |
+
{
|
6589 |
+
"epoch": 0.1278208323375781,
|
6590 |
+
"grad_norm": 0.0004887664690613747,
|
6591 |
+
"learning_rate": 0.00015628652385408508,
|
6592 |
+
"loss": 46.0,
|
6593 |
+
"step": 936
|
6594 |
+
},
|
6595 |
+
{
|
6596 |
+
"epoch": 0.12795739305588746,
|
6597 |
+
"grad_norm": 0.00038833319558762014,
|
6598 |
+
"learning_rate": 0.00015619964734033172,
|
6599 |
+
"loss": 46.0,
|
6600 |
+
"step": 937
|
6601 |
+
},
|
6602 |
+
{
|
6603 |
+
"epoch": 0.12809395377419686,
|
6604 |
+
"grad_norm": 0.0010323721216991544,
|
6605 |
+
"learning_rate": 0.00015611270878384552,
|
6606 |
+
"loss": 46.0,
|
6607 |
+
"step": 938
|
6608 |
+
},
|
6609 |
+
{
|
6610 |
+
"epoch": 0.12823051449250622,
|
6611 |
+
"grad_norm": 0.0009570369147695601,
|
6612 |
+
"learning_rate": 0.00015602570828060407,
|
6613 |
+
"loss": 46.0,
|
6614 |
+
"step": 939
|
6615 |
+
},
|
6616 |
+
{
|
6617 |
+
"epoch": 0.1283670752108156,
|
6618 |
+
"grad_norm": 0.0024246196262538433,
|
6619 |
+
"learning_rate": 0.00015593864592665333,
|
6620 |
+
"loss": 46.0,
|
6621 |
+
"step": 940
|
6622 |
+
},
|
6623 |
+
{
|
6624 |
+
"epoch": 0.12850363592912498,
|
6625 |
+
"grad_norm": 0.000476339366286993,
|
6626 |
+
"learning_rate": 0.00015585152181810753,
|
6627 |
+
"loss": 46.0,
|
6628 |
+
"step": 941
|
6629 |
+
},
|
6630 |
+
{
|
6631 |
+
"epoch": 0.12864019664743437,
|
6632 |
+
"grad_norm": 0.0007504654349759221,
|
6633 |
+
"learning_rate": 0.00015576433605114912,
|
6634 |
+
"loss": 46.0,
|
6635 |
+
"step": 942
|
6636 |
+
},
|
6637 |
+
{
|
6638 |
+
"epoch": 0.12877675736574373,
|
6639 |
+
"grad_norm": 0.0006261309026740491,
|
6640 |
+
"learning_rate": 0.00015567708872202854,
|
6641 |
+
"loss": 46.0,
|
6642 |
+
"step": 943
|
6643 |
+
},
|
6644 |
+
{
|
6645 |
+
"epoch": 0.12891331808405312,
|
6646 |
+
"grad_norm": 0.007645017467439175,
|
6647 |
+
"learning_rate": 0.00015558977992706426,
|
6648 |
+
"loss": 46.0,
|
6649 |
+
"step": 944
|
6650 |
+
},
|
6651 |
+
{
|
6652 |
+
"epoch": 0.1290498788023625,
|
6653 |
+
"grad_norm": 0.0004795739660039544,
|
6654 |
+
"learning_rate": 0.00015550240976264253,
|
6655 |
+
"loss": 46.0,
|
6656 |
+
"step": 945
|
6657 |
+
},
|
6658 |
+
{
|
6659 |
+
"epoch": 0.12918643952067188,
|
6660 |
+
"grad_norm": 0.0020615507382899523,
|
6661 |
+
"learning_rate": 0.0001554149783252175,
|
6662 |
+
"loss": 46.0,
|
6663 |
+
"step": 946
|
6664 |
+
},
|
6665 |
+
{
|
6666 |
+
"epoch": 0.12932300023898124,
|
6667 |
+
"grad_norm": 0.0015128081431612372,
|
6668 |
+
"learning_rate": 0.0001553274857113108,
|
6669 |
+
"loss": 46.0,
|
6670 |
+
"step": 947
|
6671 |
+
},
|
6672 |
+
{
|
6673 |
+
"epoch": 0.12945956095729064,
|
6674 |
+
"grad_norm": 0.0017247534124180675,
|
6675 |
+
"learning_rate": 0.00015523993201751167,
|
6676 |
+
"loss": 46.0,
|
6677 |
+
"step": 948
|
6678 |
+
},
|
6679 |
+
{
|
6680 |
+
"epoch": 0.1295961216756,
|
6681 |
+
"grad_norm": 0.0014766879612579942,
|
6682 |
+
"learning_rate": 0.00015515231734047677,
|
6683 |
+
"loss": 46.0,
|
6684 |
+
"step": 949
|
6685 |
+
},
|
6686 |
+
{
|
6687 |
+
"epoch": 0.1297326823939094,
|
6688 |
+
"grad_norm": 0.0012566361110657454,
|
6689 |
+
"learning_rate": 0.0001550646417769301,
|
6690 |
+
"loss": 46.0,
|
6691 |
+
"step": 950
|
6692 |
+
},
|
6693 |
+
{
|
6694 |
+
"epoch": 0.12986924311221876,
|
6695 |
+
"grad_norm": 0.0005157532868906856,
|
6696 |
+
"learning_rate": 0.0001549769054236629,
|
6697 |
+
"loss": 46.0,
|
6698 |
+
"step": 951
|
6699 |
+
},
|
6700 |
+
{
|
6701 |
+
"epoch": 0.13000580383052815,
|
6702 |
+
"grad_norm": 0.0003065975906793028,
|
6703 |
+
"learning_rate": 0.00015488910837753342,
|
6704 |
+
"loss": 46.0,
|
6705 |
+
"step": 952
|
6706 |
+
},
|
6707 |
+
{
|
6708 |
+
"epoch": 0.1301423645488375,
|
6709 |
+
"grad_norm": 0.0006567993550561368,
|
6710 |
+
"learning_rate": 0.00015480125073546704,
|
6711 |
+
"loss": 46.0,
|
6712 |
+
"step": 953
|
6713 |
+
},
|
6714 |
+
{
|
6715 |
+
"epoch": 0.1302789252671469,
|
6716 |
+
"grad_norm": 0.0003647709672804922,
|
6717 |
+
"learning_rate": 0.0001547133325944559,
|
6718 |
+
"loss": 46.0,
|
6719 |
+
"step": 954
|
6720 |
+
},
|
6721 |
+
{
|
6722 |
+
"epoch": 0.13041548598545627,
|
6723 |
+
"grad_norm": 0.0007084013195708394,
|
6724 |
+
"learning_rate": 0.00015462535405155902,
|
6725 |
+
"loss": 46.0,
|
6726 |
+
"step": 955
|
6727 |
+
},
|
6728 |
+
{
|
6729 |
+
"epoch": 0.13055204670376566,
|
6730 |
+
"grad_norm": 0.00019670635811053216,
|
6731 |
+
"learning_rate": 0.00015453731520390215,
|
6732 |
+
"loss": 46.0,
|
6733 |
+
"step": 956
|
6734 |
+
},
|
6735 |
+
{
|
6736 |
+
"epoch": 0.13068860742207505,
|
6737 |
+
"grad_norm": 0.0008069248287938535,
|
6738 |
+
"learning_rate": 0.0001544492161486775,
|
6739 |
+
"loss": 46.0,
|
6740 |
+
"step": 957
|
6741 |
+
},
|
6742 |
+
{
|
6743 |
+
"epoch": 0.13082516814038442,
|
6744 |
+
"grad_norm": 0.00020321070041973144,
|
6745 |
+
"learning_rate": 0.00015436105698314384,
|
6746 |
+
"loss": 46.0,
|
6747 |
+
"step": 958
|
6748 |
+
},
|
6749 |
+
{
|
6750 |
+
"epoch": 0.1309617288586938,
|
6751 |
+
"grad_norm": 0.0007830312824808061,
|
6752 |
+
"learning_rate": 0.0001542728378046262,
|
6753 |
+
"loss": 46.0,
|
6754 |
+
"step": 959
|
6755 |
+
},
|
6756 |
+
{
|
6757 |
+
"epoch": 0.13109828957700317,
|
6758 |
+
"grad_norm": 0.0009551789262332022,
|
6759 |
+
"learning_rate": 0.00015418455871051592,
|
6760 |
+
"loss": 46.0,
|
6761 |
+
"step": 960
|
6762 |
+
},
|
6763 |
+
{
|
6764 |
+
"epoch": 0.13123485029531257,
|
6765 |
+
"grad_norm": 0.0005781060899607837,
|
6766 |
+
"learning_rate": 0.00015409621979827048,
|
6767 |
+
"loss": 46.0,
|
6768 |
+
"step": 961
|
6769 |
+
},
|
6770 |
+
{
|
6771 |
+
"epoch": 0.13137141101362193,
|
6772 |
+
"grad_norm": 0.0003388145414646715,
|
6773 |
+
"learning_rate": 0.0001540078211654135,
|
6774 |
+
"loss": 46.0,
|
6775 |
+
"step": 962
|
6776 |
+
},
|
6777 |
+
{
|
6778 |
+
"epoch": 0.13150797173193132,
|
6779 |
+
"grad_norm": 0.00038392908754758537,
|
6780 |
+
"learning_rate": 0.0001539193629095343,
|
6781 |
+
"loss": 46.0,
|
6782 |
+
"step": 963
|
6783 |
+
},
|
6784 |
+
{
|
6785 |
+
"epoch": 0.1316445324502407,
|
6786 |
+
"grad_norm": 0.0004694248200394213,
|
6787 |
+
"learning_rate": 0.00015383084512828824,
|
6788 |
+
"loss": 46.0,
|
6789 |
+
"step": 964
|
6790 |
+
},
|
6791 |
+
{
|
6792 |
+
"epoch": 0.13178109316855008,
|
6793 |
+
"grad_norm": 0.0003350118058733642,
|
6794 |
+
"learning_rate": 0.00015374226791939628,
|
6795 |
+
"loss": 46.0,
|
6796 |
+
"step": 965
|
6797 |
+
},
|
6798 |
+
{
|
6799 |
+
"epoch": 0.13191765388685944,
|
6800 |
+
"grad_norm": 0.0006715120980516076,
|
6801 |
+
"learning_rate": 0.000153653631380645,
|
6802 |
+
"loss": 46.0,
|
6803 |
+
"step": 966
|
6804 |
+
},
|
6805 |
+
{
|
6806 |
+
"epoch": 0.13205421460516883,
|
6807 |
+
"grad_norm": 0.0017796235624700785,
|
6808 |
+
"learning_rate": 0.0001535649356098865,
|
6809 |
+
"loss": 46.0,
|
6810 |
+
"step": 967
|
6811 |
+
},
|
6812 |
+
{
|
6813 |
+
"epoch": 0.1321907753234782,
|
6814 |
+
"grad_norm": 0.0003016989503521472,
|
6815 |
+
"learning_rate": 0.00015347618070503827,
|
6816 |
+
"loss": 46.0,
|
6817 |
+
"step": 968
|
6818 |
+
},
|
6819 |
+
{
|
6820 |
+
"epoch": 0.1323273360417876,
|
6821 |
+
"grad_norm": 0.00016873747517820448,
|
6822 |
+
"learning_rate": 0.0001533873667640831,
|
6823 |
+
"loss": 46.0,
|
6824 |
+
"step": 969
|
6825 |
+
},
|
6826 |
+
{
|
6827 |
+
"epoch": 0.13246389676009696,
|
6828 |
+
"grad_norm": 0.0004186210280749947,
|
6829 |
+
"learning_rate": 0.00015329849388506886,
|
6830 |
+
"loss": 46.0,
|
6831 |
+
"step": 970
|
6832 |
+
},
|
6833 |
+
{
|
6834 |
+
"epoch": 0.13260045747840635,
|
6835 |
+
"grad_norm": 0.00041822067578323185,
|
6836 |
+
"learning_rate": 0.00015320956216610866,
|
6837 |
+
"loss": 46.0,
|
6838 |
+
"step": 971
|
6839 |
+
},
|
6840 |
+
{
|
6841 |
+
"epoch": 0.1327370181967157,
|
6842 |
+
"grad_norm": 0.0005545847816392779,
|
6843 |
+
"learning_rate": 0.00015312057170538035,
|
6844 |
+
"loss": 46.0,
|
6845 |
+
"step": 972
|
6846 |
+
},
|
6847 |
+
{
|
6848 |
+
"epoch": 0.1328735789150251,
|
6849 |
+
"grad_norm": 0.0009207350667566061,
|
6850 |
+
"learning_rate": 0.00015303152260112682,
|
6851 |
+
"loss": 46.0,
|
6852 |
+
"step": 973
|
6853 |
+
},
|
6854 |
+
{
|
6855 |
+
"epoch": 0.13301013963333447,
|
6856 |
+
"grad_norm": 0.0008230162202380598,
|
6857 |
+
"learning_rate": 0.00015294241495165557,
|
6858 |
+
"loss": 46.0,
|
6859 |
+
"step": 974
|
6860 |
+
},
|
6861 |
+
{
|
6862 |
+
"epoch": 0.13314670035164386,
|
6863 |
+
"grad_norm": 0.0004879190237261355,
|
6864 |
+
"learning_rate": 0.00015285324885533884,
|
6865 |
+
"loss": 46.0,
|
6866 |
+
"step": 975
|
6867 |
+
},
|
6868 |
+
{
|
6869 |
+
"epoch": 0.13328326106995322,
|
6870 |
+
"grad_norm": 0.00039031868800520897,
|
6871 |
+
"learning_rate": 0.0001527640244106133,
|
6872 |
+
"loss": 46.0,
|
6873 |
+
"step": 976
|
6874 |
+
},
|
6875 |
+
{
|
6876 |
+
"epoch": 0.13341982178826262,
|
6877 |
+
"grad_norm": 0.002188141457736492,
|
6878 |
+
"learning_rate": 0.00015267474171598005,
|
6879 |
+
"loss": 46.0,
|
6880 |
+
"step": 977
|
6881 |
+
},
|
6882 |
+
{
|
6883 |
+
"epoch": 0.13355638250657198,
|
6884 |
+
"grad_norm": 0.00032051303423941135,
|
6885 |
+
"learning_rate": 0.0001525854008700046,
|
6886 |
+
"loss": 46.0,
|
6887 |
+
"step": 978
|
6888 |
+
},
|
6889 |
+
{
|
6890 |
+
"epoch": 0.13369294322488137,
|
6891 |
+
"grad_norm": 0.0003121615736745298,
|
6892 |
+
"learning_rate": 0.00015249600197131651,
|
6893 |
+
"loss": 46.0,
|
6894 |
+
"step": 979
|
6895 |
+
},
|
6896 |
+
{
|
6897 |
+
"epoch": 0.13382950394319074,
|
6898 |
+
"grad_norm": 0.00036074977833777666,
|
6899 |
+
"learning_rate": 0.0001524065451186095,
|
6900 |
+
"loss": 46.0,
|
6901 |
+
"step": 980
|
6902 |
+
},
|
6903 |
+
{
|
6904 |
+
"epoch": 0.13396606466150013,
|
6905 |
+
"grad_norm": 0.0004405210493132472,
|
6906 |
+
"learning_rate": 0.0001523170304106413,
|
6907 |
+
"loss": 46.0,
|
6908 |
+
"step": 981
|
6909 |
+
},
|
6910 |
+
{
|
6911 |
+
"epoch": 0.1341026253798095,
|
6912 |
+
"grad_norm": 0.0005337116890586913,
|
6913 |
+
"learning_rate": 0.0001522274579462334,
|
6914 |
+
"loss": 46.0,
|
6915 |
+
"step": 982
|
6916 |
+
},
|
6917 |
+
{
|
6918 |
+
"epoch": 0.13423918609811888,
|
6919 |
+
"grad_norm": 0.0008604004397056997,
|
6920 |
+
"learning_rate": 0.00015213782782427123,
|
6921 |
+
"loss": 46.0,
|
6922 |
+
"step": 983
|
6923 |
+
},
|
6924 |
+
{
|
6925 |
+
"epoch": 0.13437574681642825,
|
6926 |
+
"grad_norm": 0.00042646893416531384,
|
6927 |
+
"learning_rate": 0.00015204814014370372,
|
6928 |
+
"loss": 46.0,
|
6929 |
+
"step": 984
|
6930 |
+
},
|
6931 |
+
{
|
6932 |
+
"epoch": 0.13451230753473764,
|
6933 |
+
"grad_norm": 0.0015927368076518178,
|
6934 |
+
"learning_rate": 0.00015195839500354335,
|
6935 |
+
"loss": 46.0,
|
6936 |
+
"step": 985
|
6937 |
+
},
|
6938 |
+
{
|
6939 |
+
"epoch": 0.134648868253047,
|
6940 |
+
"grad_norm": 0.0005996901891194284,
|
6941 |
+
"learning_rate": 0.00015186859250286615,
|
6942 |
+
"loss": 46.0,
|
6943 |
+
"step": 986
|
6944 |
+
},
|
6945 |
+
{
|
6946 |
+
"epoch": 0.1347854289713564,
|
6947 |
+
"grad_norm": 0.0010192387271672487,
|
6948 |
+
"learning_rate": 0.00015177873274081137,
|
6949 |
+
"loss": 46.0,
|
6950 |
+
"step": 987
|
6951 |
+
},
|
6952 |
+
{
|
6953 |
+
"epoch": 0.13492198968966576,
|
6954 |
+
"grad_norm": 0.0008904286078177392,
|
6955 |
+
"learning_rate": 0.00015168881581658147,
|
6956 |
+
"loss": 46.0,
|
6957 |
+
"step": 988
|
6958 |
+
},
|
6959 |
+
{
|
6960 |
+
"epoch": 0.13505855040797515,
|
6961 |
+
"grad_norm": 0.001511828857474029,
|
6962 |
+
"learning_rate": 0.00015159884182944211,
|
6963 |
+
"loss": 46.0,
|
6964 |
+
"step": 989
|
6965 |
+
},
|
6966 |
+
{
|
6967 |
+
"epoch": 0.13519511112628452,
|
6968 |
+
"grad_norm": 0.0010609021410346031,
|
6969 |
+
"learning_rate": 0.00015150881087872185,
|
6970 |
+
"loss": 46.0,
|
6971 |
+
"step": 990
|
6972 |
+
},
|
6973 |
+
{
|
6974 |
+
"epoch": 0.1353316718445939,
|
6975 |
+
"grad_norm": 0.0008939913823269308,
|
6976 |
+
"learning_rate": 0.00015141872306381215,
|
6977 |
+
"loss": 46.0,
|
6978 |
+
"step": 991
|
6979 |
+
},
|
6980 |
+
{
|
6981 |
+
"epoch": 0.13546823256290327,
|
6982 |
+
"grad_norm": 0.0004643239954020828,
|
6983 |
+
"learning_rate": 0.00015132857848416733,
|
6984 |
+
"loss": 46.0,
|
6985 |
+
"step": 992
|
6986 |
+
},
|
6987 |
+
{
|
6988 |
+
"epoch": 0.13560479328121267,
|
6989 |
+
"grad_norm": 0.0012045464245602489,
|
6990 |
+
"learning_rate": 0.00015123837723930424,
|
6991 |
+
"loss": 46.0,
|
6992 |
+
"step": 993
|
6993 |
+
},
|
6994 |
+
{
|
6995 |
+
"epoch": 0.13574135399952203,
|
6996 |
+
"grad_norm": 0.0006322221015579998,
|
6997 |
+
"learning_rate": 0.00015114811942880242,
|
6998 |
+
"loss": 46.0,
|
6999 |
+
"step": 994
|
7000 |
+
},
|
7001 |
+
{
|
7002 |
+
"epoch": 0.13587791471783142,
|
7003 |
+
"grad_norm": 0.0005334729212336242,
|
7004 |
+
"learning_rate": 0.00015105780515230376,
|
7005 |
+
"loss": 46.0,
|
7006 |
+
"step": 995
|
7007 |
+
},
|
7008 |
+
{
|
7009 |
+
"epoch": 0.13601447543614079,
|
7010 |
+
"grad_norm": 0.0009253498283214867,
|
7011 |
+
"learning_rate": 0.00015096743450951258,
|
7012 |
+
"loss": 46.0,
|
7013 |
+
"step": 996
|
7014 |
+
},
|
7015 |
+
{
|
7016 |
+
"epoch": 0.13615103615445018,
|
7017 |
+
"grad_norm": 0.000553362478967756,
|
7018 |
+
"learning_rate": 0.00015087700760019532,
|
7019 |
+
"loss": 46.0,
|
7020 |
+
"step": 997
|
7021 |
+
},
|
7022 |
+
{
|
7023 |
+
"epoch": 0.13628759687275954,
|
7024 |
+
"grad_norm": 0.0024341470561921597,
|
7025 |
+
"learning_rate": 0.00015078652452418063,
|
7026 |
+
"loss": 46.0,
|
7027 |
+
"step": 998
|
7028 |
+
},
|
7029 |
+
{
|
7030 |
+
"epoch": 0.13642415759106893,
|
7031 |
+
"grad_norm": 0.0007676496752537787,
|
7032 |
+
"learning_rate": 0.00015069598538135906,
|
7033 |
+
"loss": 46.0,
|
7034 |
+
"step": 999
|
7035 |
+
},
|
7036 |
+
{
|
7037 |
+
"epoch": 0.1365607183093783,
|
7038 |
+
"grad_norm": 0.0014793665613979101,
|
7039 |
+
"learning_rate": 0.00015060539027168316,
|
7040 |
+
"loss": 46.0,
|
7041 |
+
"step": 1000
|
7042 |
+
},
|
7043 |
+
{
|
7044 |
+
"epoch": 0.1366972790276877,
|
7045 |
+
"grad_norm": 0.00045033841161057353,
|
7046 |
+
"learning_rate": 0.00015051473929516722,
|
7047 |
+
"loss": 46.0,
|
7048 |
+
"step": 1001
|
7049 |
+
},
|
7050 |
+
{
|
7051 |
+
"epoch": 0.13683383974599705,
|
7052 |
+
"grad_norm": 0.0005630904342979193,
|
7053 |
+
"learning_rate": 0.00015042403255188723,
|
7054 |
+
"loss": 46.0,
|
7055 |
+
"step": 1002
|
7056 |
+
},
|
7057 |
+
{
|
7058 |
+
"epoch": 0.13697040046430645,
|
7059 |
+
"grad_norm": 0.0003502410836517811,
|
7060 |
+
"learning_rate": 0.00015033327014198075,
|
7061 |
+
"loss": 46.0,
|
7062 |
+
"step": 1003
|
7063 |
+
},
|
7064 |
+
{
|
7065 |
+
"epoch": 0.1371069611826158,
|
7066 |
+
"grad_norm": 0.0004893496516160667,
|
7067 |
+
"learning_rate": 0.00015024245216564667,
|
7068 |
+
"loss": 46.0,
|
7069 |
+
"step": 1004
|
7070 |
+
},
|
7071 |
+
{
|
7072 |
+
"epoch": 0.1372435219009252,
|
7073 |
+
"grad_norm": 0.0007234884542413056,
|
7074 |
+
"learning_rate": 0.00015015157872314542,
|
7075 |
+
"loss": 46.0,
|
7076 |
+
"step": 1005
|
7077 |
+
},
|
7078 |
+
{
|
7079 |
+
"epoch": 0.13738008261923457,
|
7080 |
+
"grad_norm": 0.000497414730489254,
|
7081 |
+
"learning_rate": 0.00015006064991479853,
|
7082 |
+
"loss": 46.0,
|
7083 |
+
"step": 1006
|
7084 |
+
},
|
7085 |
+
{
|
7086 |
+
"epoch": 0.13751664333754396,
|
7087 |
+
"grad_norm": 0.00021947573986835778,
|
7088 |
+
"learning_rate": 0.0001499696658409887,
|
7089 |
+
"loss": 46.0,
|
7090 |
+
"step": 1007
|
7091 |
+
},
|
7092 |
+
{
|
7093 |
+
"epoch": 0.13765320405585332,
|
7094 |
+
"grad_norm": 0.0005359818460419774,
|
7095 |
+
"learning_rate": 0.00014987862660215966,
|
7096 |
+
"loss": 46.0,
|
7097 |
+
"step": 1008
|
7098 |
+
},
|
7099 |
+
{
|
7100 |
+
"epoch": 0.13778976477416272,
|
7101 |
+
"grad_norm": 0.0010016814339905977,
|
7102 |
+
"learning_rate": 0.00014978753229881594,
|
7103 |
+
"loss": 46.0,
|
7104 |
+
"step": 1009
|
7105 |
+
},
|
7106 |
+
{
|
7107 |
+
"epoch": 0.13792632549247208,
|
7108 |
+
"grad_norm": 0.00039751100121065974,
|
7109 |
+
"learning_rate": 0.00014969638303152295,
|
7110 |
+
"loss": 46.0,
|
7111 |
+
"step": 1010
|
7112 |
+
},
|
7113 |
+
{
|
7114 |
+
"epoch": 0.13806288621078147,
|
7115 |
+
"grad_norm": 0.00047437057946808636,
|
7116 |
+
"learning_rate": 0.0001496051789009068,
|
7117 |
+
"loss": 46.0,
|
7118 |
+
"step": 1011
|
7119 |
+
},
|
7120 |
+
{
|
7121 |
+
"epoch": 0.13819944692909084,
|
7122 |
+
"grad_norm": 0.0006710319430567324,
|
7123 |
+
"learning_rate": 0.00014951392000765411,
|
7124 |
+
"loss": 46.0,
|
7125 |
+
"step": 1012
|
7126 |
+
},
|
7127 |
+
{
|
7128 |
+
"epoch": 0.13833600764740023,
|
7129 |
+
"grad_norm": 0.0004975870833732188,
|
7130 |
+
"learning_rate": 0.000149422606452512,
|
7131 |
+
"loss": 46.0,
|
7132 |
+
"step": 1013
|
7133 |
+
},
|
7134 |
+
{
|
7135 |
+
"epoch": 0.1384725683657096,
|
7136 |
+
"grad_norm": 0.0002557364059612155,
|
7137 |
+
"learning_rate": 0.00014933123833628785,
|
7138 |
+
"loss": 46.0,
|
7139 |
+
"step": 1014
|
7140 |
+
},
|
7141 |
+
{
|
7142 |
+
"epoch": 0.13860912908401898,
|
7143 |
+
"grad_norm": 0.00041163183050230145,
|
7144 |
+
"learning_rate": 0.00014923981575984936,
|
7145 |
+
"loss": 46.0,
|
7146 |
+
"step": 1015
|
7147 |
+
},
|
7148 |
+
{
|
7149 |
+
"epoch": 0.13874568980232835,
|
7150 |
+
"grad_norm": 0.000696165778208524,
|
7151 |
+
"learning_rate": 0.00014914833882412435,
|
7152 |
+
"loss": 46.0,
|
7153 |
+
"step": 1016
|
7154 |
+
},
|
7155 |
+
{
|
7156 |
+
"epoch": 0.13888225052063774,
|
7157 |
+
"grad_norm": 0.0004750938678625971,
|
7158 |
+
"learning_rate": 0.00014905680763010058,
|
7159 |
+
"loss": 46.0,
|
7160 |
+
"step": 1017
|
7161 |
+
},
|
7162 |
+
{
|
7163 |
+
"epoch": 0.1390188112389471,
|
7164 |
+
"grad_norm": 0.0006398932309821248,
|
7165 |
+
"learning_rate": 0.00014896522227882578,
|
7166 |
+
"loss": 46.0,
|
7167 |
+
"step": 1018
|
7168 |
+
},
|
7169 |
+
{
|
7170 |
+
"epoch": 0.1391553719572565,
|
7171 |
+
"grad_norm": 0.0010499466443434358,
|
7172 |
+
"learning_rate": 0.00014887358287140744,
|
7173 |
+
"loss": 46.0,
|
7174 |
+
"step": 1019
|
7175 |
+
},
|
7176 |
+
{
|
7177 |
+
"epoch": 0.13929193267556586,
|
7178 |
+
"grad_norm": 0.0013344734907150269,
|
7179 |
+
"learning_rate": 0.00014878188950901276,
|
7180 |
+
"loss": 46.0,
|
7181 |
+
"step": 1020
|
7182 |
+
},
|
7183 |
+
{
|
7184 |
+
"epoch": 0.13942849339387525,
|
7185 |
+
"grad_norm": 0.005541623570024967,
|
7186 |
+
"learning_rate": 0.0001486901422928684,
|
7187 |
+
"loss": 46.0,
|
7188 |
+
"step": 1021
|
7189 |
+
},
|
7190 |
+
{
|
7191 |
+
"epoch": 0.13956505411218462,
|
7192 |
+
"grad_norm": 0.00047458295011892915,
|
7193 |
+
"learning_rate": 0.0001485983413242606,
|
7194 |
+
"loss": 46.0,
|
7195 |
+
"step": 1022
|
7196 |
+
},
|
7197 |
+
{
|
7198 |
+
"epoch": 0.139701614830494,
|
7199 |
+
"grad_norm": 0.00033128532231785357,
|
7200 |
+
"learning_rate": 0.00014850648670453493,
|
7201 |
+
"loss": 46.0,
|
7202 |
+
"step": 1023
|
7203 |
+
},
|
7204 |
+
{
|
7205 |
+
"epoch": 0.13983817554880337,
|
7206 |
+
"grad_norm": 0.0005759402411058545,
|
7207 |
+
"learning_rate": 0.00014841457853509606,
|
7208 |
+
"loss": 46.0,
|
7209 |
+
"step": 1024
|
7210 |
+
},
|
7211 |
+
{
|
7212 |
+
"epoch": 0.13997473626711276,
|
7213 |
+
"grad_norm": 0.0002624512417241931,
|
7214 |
+
"learning_rate": 0.0001483226169174079,
|
7215 |
+
"loss": 46.0,
|
7216 |
+
"step": 1025
|
7217 |
+
},
|
7218 |
+
{
|
7219 |
+
"epoch": 0.14011129698542216,
|
7220 |
+
"grad_norm": 0.0005069900071248412,
|
7221 |
+
"learning_rate": 0.00014823060195299337,
|
7222 |
+
"loss": 46.0,
|
7223 |
+
"step": 1026
|
7224 |
+
},
|
7225 |
+
{
|
7226 |
+
"epoch": 0.14024785770373152,
|
7227 |
+
"grad_norm": 0.000561585184186697,
|
7228 |
+
"learning_rate": 0.00014813853374343419,
|
7229 |
+
"loss": 46.0,
|
7230 |
+
"step": 1027
|
7231 |
+
},
|
7232 |
+
{
|
7233 |
+
"epoch": 0.1403844184220409,
|
7234 |
+
"grad_norm": 0.00040233286563307047,
|
7235 |
+
"learning_rate": 0.00014804641239037097,
|
7236 |
+
"loss": 46.0,
|
7237 |
+
"step": 1028
|
7238 |
+
},
|
7239 |
+
{
|
7240 |
+
"epoch": 0.14052097914035028,
|
7241 |
+
"grad_norm": 0.0006136324373073876,
|
7242 |
+
"learning_rate": 0.00014795423799550284,
|
7243 |
+
"loss": 46.0,
|
7244 |
+
"step": 1029
|
7245 |
+
},
|
7246 |
+
{
|
7247 |
+
"epoch": 0.14065753985865967,
|
7248 |
+
"grad_norm": 0.0004925274988636374,
|
7249 |
+
"learning_rate": 0.00014786201066058766,
|
7250 |
+
"loss": 46.0,
|
7251 |
+
"step": 1030
|
7252 |
+
},
|
7253 |
+
{
|
7254 |
+
"epoch": 0.14079410057696903,
|
7255 |
+
"grad_norm": 0.0006804429576732218,
|
7256 |
+
"learning_rate": 0.00014776973048744165,
|
7257 |
+
"loss": 46.0,
|
7258 |
+
"step": 1031
|
7259 |
+
},
|
7260 |
+
{
|
7261 |
+
"epoch": 0.14093066129527843,
|
7262 |
+
"grad_norm": 0.00039797197678126395,
|
7263 |
+
"learning_rate": 0.0001476773975779393,
|
7264 |
+
"loss": 46.0,
|
7265 |
+
"step": 1032
|
7266 |
+
},
|
7267 |
+
{
|
7268 |
+
"epoch": 0.1410672220135878,
|
7269 |
+
"grad_norm": 0.0005878604715690017,
|
7270 |
+
"learning_rate": 0.00014758501203401348,
|
7271 |
+
"loss": 46.0,
|
7272 |
+
"step": 1033
|
7273 |
+
},
|
7274 |
+
{
|
7275 |
+
"epoch": 0.14120378273189718,
|
7276 |
+
"grad_norm": 0.001166685950011015,
|
7277 |
+
"learning_rate": 0.00014749257395765502,
|
7278 |
+
"loss": 46.0,
|
7279 |
+
"step": 1034
|
7280 |
+
},
|
7281 |
+
{
|
7282 |
+
"epoch": 0.14134034345020655,
|
7283 |
+
"grad_norm": 0.0006884298636578023,
|
7284 |
+
"learning_rate": 0.0001474000834509128,
|
7285 |
+
"loss": 46.0,
|
7286 |
+
"step": 1035
|
7287 |
+
},
|
7288 |
+
{
|
7289 |
+
"epoch": 0.14147690416851594,
|
7290 |
+
"grad_norm": 0.000649279507342726,
|
7291 |
+
"learning_rate": 0.00014730754061589355,
|
7292 |
+
"loss": 46.0,
|
7293 |
+
"step": 1036
|
7294 |
+
},
|
7295 |
+
{
|
7296 |
+
"epoch": 0.1416134648868253,
|
7297 |
+
"grad_norm": 0.0009792500641196966,
|
7298 |
+
"learning_rate": 0.00014721494555476188,
|
7299 |
+
"loss": 46.0,
|
7300 |
+
"step": 1037
|
7301 |
+
},
|
7302 |
+
{
|
7303 |
+
"epoch": 0.1417500256051347,
|
7304 |
+
"grad_norm": 0.0010333802783861756,
|
7305 |
+
"learning_rate": 0.00014712229836973988,
|
7306 |
+
"loss": 46.0,
|
7307 |
+
"step": 1038
|
7308 |
+
},
|
7309 |
+
{
|
7310 |
+
"epoch": 0.14188658632344406,
|
7311 |
+
"grad_norm": 0.002243500202894211,
|
7312 |
+
"learning_rate": 0.00014702959916310736,
|
7313 |
+
"loss": 46.0,
|
7314 |
+
"step": 1039
|
7315 |
+
},
|
7316 |
+
{
|
7317 |
+
"epoch": 0.14202314704175345,
|
7318 |
+
"grad_norm": 0.011758006177842617,
|
7319 |
+
"learning_rate": 0.00014693684803720138,
|
7320 |
+
"loss": 46.0,
|
7321 |
+
"step": 1040
|
7322 |
+
},
|
7323 |
+
{
|
7324 |
+
"epoch": 0.14215970776006281,
|
7325 |
+
"grad_norm": 0.00039091319194994867,
|
7326 |
+
"learning_rate": 0.0001468440450944165,
|
7327 |
+
"loss": 46.0,
|
7328 |
+
"step": 1041
|
7329 |
+
},
|
7330 |
+
{
|
7331 |
+
"epoch": 0.1422962684783722,
|
7332 |
+
"grad_norm": 0.0005926385056227446,
|
7333 |
+
"learning_rate": 0.00014675119043720437,
|
7334 |
+
"loss": 46.0,
|
7335 |
+
"step": 1042
|
7336 |
+
},
|
7337 |
+
{
|
7338 |
+
"epoch": 0.14243282919668157,
|
7339 |
+
"grad_norm": 0.0009843135485425591,
|
7340 |
+
"learning_rate": 0.0001466582841680737,
|
7341 |
+
"loss": 46.0,
|
7342 |
+
"step": 1043
|
7343 |
+
},
|
7344 |
+
{
|
7345 |
+
"epoch": 0.14256938991499096,
|
7346 |
+
"grad_norm": 0.0006317980587482452,
|
7347 |
+
"learning_rate": 0.00014656532638959035,
|
7348 |
+
"loss": 46.0,
|
7349 |
+
"step": 1044
|
7350 |
+
},
|
7351 |
+
{
|
7352 |
+
"epoch": 0.14270595063330033,
|
7353 |
+
"grad_norm": 0.0017019541701301932,
|
7354 |
+
"learning_rate": 0.00014647231720437686,
|
7355 |
+
"loss": 46.0,
|
7356 |
+
"step": 1045
|
7357 |
+
},
|
7358 |
+
{
|
7359 |
+
"epoch": 0.14284251135160972,
|
7360 |
+
"grad_norm": 0.0006502672913484275,
|
7361 |
+
"learning_rate": 0.0001463792567151126,
|
7362 |
+
"loss": 46.0,
|
7363 |
+
"step": 1046
|
7364 |
+
},
|
7365 |
+
{
|
7366 |
+
"epoch": 0.14297907206991908,
|
7367 |
+
"grad_norm": 0.0007161677349358797,
|
7368 |
+
"learning_rate": 0.0001462861450245336,
|
7369 |
+
"loss": 46.0,
|
7370 |
+
"step": 1047
|
7371 |
+
},
|
7372 |
+
{
|
7373 |
+
"epoch": 0.14311563278822848,
|
7374 |
+
"grad_norm": 0.0017611135262995958,
|
7375 |
+
"learning_rate": 0.00014619298223543235,
|
7376 |
+
"loss": 46.0,
|
7377 |
+
"step": 1048
|
7378 |
+
},
|
7379 |
+
{
|
7380 |
+
"epoch": 0.14325219350653784,
|
7381 |
+
"grad_norm": 0.0015631330898031592,
|
7382 |
+
"learning_rate": 0.00014609976845065783,
|
7383 |
+
"loss": 46.0,
|
7384 |
+
"step": 1049
|
7385 |
+
},
|
7386 |
+
{
|
7387 |
+
"epoch": 0.14338875422484723,
|
7388 |
+
"grad_norm": 0.0012707884889096022,
|
7389 |
+
"learning_rate": 0.00014600650377311522,
|
7390 |
+
"loss": 46.0,
|
7391 |
+
"step": 1050
|
7392 |
+
},
|
7393 |
+
{
|
7394 |
+
"epoch": 0.1435253149431566,
|
7395 |
+
"grad_norm": 0.0006978696910664439,
|
7396 |
+
"learning_rate": 0.00014591318830576598,
|
7397 |
+
"loss": 46.0,
|
7398 |
+
"step": 1051
|
7399 |
+
},
|
7400 |
+
{
|
7401 |
+
"epoch": 0.143661875661466,
|
7402 |
+
"grad_norm": 0.00038435377064161,
|
7403 |
+
"learning_rate": 0.0001458198221516276,
|
7404 |
+
"loss": 46.0,
|
7405 |
+
"step": 1052
|
7406 |
+
},
|
7407 |
+
{
|
7408 |
+
"epoch": 0.14379843637977535,
|
7409 |
+
"grad_norm": 0.000803441449534148,
|
7410 |
+
"learning_rate": 0.0001457264054137735,
|
7411 |
+
"loss": 46.0,
|
7412 |
+
"step": 1053
|
7413 |
+
},
|
7414 |
+
{
|
7415 |
+
"epoch": 0.14393499709808474,
|
7416 |
+
"grad_norm": 0.00036193919368088245,
|
7417 |
+
"learning_rate": 0.000145632938195333,
|
7418 |
+
"loss": 46.0,
|
7419 |
+
"step": 1054
|
7420 |
+
},
|
7421 |
+
{
|
7422 |
+
"epoch": 0.1440715578163941,
|
7423 |
+
"grad_norm": 0.0004987095016986132,
|
7424 |
+
"learning_rate": 0.0001455394205994911,
|
7425 |
+
"loss": 46.0,
|
7426 |
+
"step": 1055
|
7427 |
+
},
|
7428 |
+
{
|
7429 |
+
"epoch": 0.1442081185347035,
|
7430 |
+
"grad_norm": 0.0003004857280757278,
|
7431 |
+
"learning_rate": 0.00014544585272948843,
|
7432 |
+
"loss": 46.0,
|
7433 |
+
"step": 1056
|
7434 |
+
},
|
7435 |
+
{
|
7436 |
+
"epoch": 0.14434467925301286,
|
7437 |
+
"grad_norm": 0.0006807829486206174,
|
7438 |
+
"learning_rate": 0.00014535223468862114,
|
7439 |
+
"loss": 46.0,
|
7440 |
+
"step": 1057
|
7441 |
+
},
|
7442 |
+
{
|
7443 |
+
"epoch": 0.14448123997132226,
|
7444 |
+
"grad_norm": 0.0004499613423831761,
|
7445 |
+
"learning_rate": 0.00014525856658024076,
|
7446 |
+
"loss": 46.0,
|
7447 |
+
"step": 1058
|
7448 |
+
},
|
7449 |
+
{
|
7450 |
+
"epoch": 0.14461780068963162,
|
7451 |
+
"grad_norm": 0.00042672737617976964,
|
7452 |
+
"learning_rate": 0.00014516484850775406,
|
7453 |
+
"loss": 46.0,
|
7454 |
+
"step": 1059
|
7455 |
+
},
|
7456 |
+
{
|
7457 |
+
"epoch": 0.144754361407941,
|
7458 |
+
"grad_norm": 0.0007686454919166863,
|
7459 |
+
"learning_rate": 0.00014507108057462296,
|
7460 |
+
"loss": 46.0,
|
7461 |
+
"step": 1060
|
7462 |
+
},
|
7463 |
+
{
|
7464 |
+
"epoch": 0.14489092212625038,
|
7465 |
+
"grad_norm": 0.0003870718355756253,
|
7466 |
+
"learning_rate": 0.00014497726288436458,
|
7467 |
+
"loss": 46.0,
|
7468 |
+
"step": 1061
|
7469 |
+
},
|
7470 |
+
{
|
7471 |
+
"epoch": 0.14502748284455977,
|
7472 |
+
"grad_norm": 0.00014696974540129304,
|
7473 |
+
"learning_rate": 0.00014488339554055073,
|
7474 |
+
"loss": 46.0,
|
7475 |
+
"step": 1062
|
7476 |
+
},
|
7477 |
+
{
|
7478 |
+
"epoch": 0.14516404356286913,
|
7479 |
+
"grad_norm": 0.0004816751170437783,
|
7480 |
+
"learning_rate": 0.0001447894786468082,
|
7481 |
+
"loss": 46.0,
|
7482 |
+
"step": 1063
|
7483 |
+
},
|
7484 |
+
{
|
7485 |
+
"epoch": 0.14530060428117852,
|
7486 |
+
"grad_norm": 0.0003365412703715265,
|
7487 |
+
"learning_rate": 0.00014469551230681844,
|
7488 |
+
"loss": 46.0,
|
7489 |
+
"step": 1064
|
7490 |
+
},
|
7491 |
+
{
|
7492 |
+
"epoch": 0.1454371649994879,
|
7493 |
+
"grad_norm": 0.00025079355691559613,
|
7494 |
+
"learning_rate": 0.00014460149662431747,
|
7495 |
+
"loss": 46.0,
|
7496 |
+
"step": 1065
|
7497 |
+
},
|
7498 |
+
{
|
7499 |
+
"epoch": 0.14557372571779728,
|
7500 |
+
"grad_norm": 0.00041992668411694467,
|
7501 |
+
"learning_rate": 0.00014450743170309584,
|
7502 |
+
"loss": 46.0,
|
7503 |
+
"step": 1066
|
7504 |
+
},
|
7505 |
+
{
|
7506 |
+
"epoch": 0.14571028643610665,
|
7507 |
+
"grad_norm": 0.00041824494837783277,
|
7508 |
+
"learning_rate": 0.00014441331764699836,
|
7509 |
+
"loss": 46.0,
|
7510 |
+
"step": 1067
|
7511 |
+
},
|
7512 |
+
{
|
7513 |
+
"epoch": 0.14584684715441604,
|
7514 |
+
"grad_norm": 0.0005437216605059803,
|
7515 |
+
"learning_rate": 0.00014431915455992414,
|
7516 |
+
"loss": 46.0,
|
7517 |
+
"step": 1068
|
7518 |
+
},
|
7519 |
+
{
|
7520 |
+
"epoch": 0.1459834078727254,
|
7521 |
+
"grad_norm": 0.001002727192826569,
|
7522 |
+
"learning_rate": 0.00014422494254582647,
|
7523 |
+
"loss": 46.0,
|
7524 |
+
"step": 1069
|
7525 |
+
},
|
7526 |
+
{
|
7527 |
+
"epoch": 0.1461199685910348,
|
7528 |
+
"grad_norm": 0.0003184748929925263,
|
7529 |
+
"learning_rate": 0.0001441306817087125,
|
7530 |
+
"loss": 46.0,
|
7531 |
+
"step": 1070
|
7532 |
+
},
|
7533 |
+
{
|
7534 |
+
"epoch": 0.14625652930934416,
|
7535 |
+
"grad_norm": 0.00027102333842776716,
|
7536 |
+
"learning_rate": 0.00014403637215264353,
|
7537 |
+
"loss": 46.0,
|
7538 |
+
"step": 1071
|
7539 |
+
},
|
7540 |
+
{
|
7541 |
+
"epoch": 0.14639309002765355,
|
7542 |
+
"grad_norm": 0.0006005719187669456,
|
7543 |
+
"learning_rate": 0.00014394201398173437,
|
7544 |
+
"loss": 46.0,
|
7545 |
+
"step": 1072
|
7546 |
+
},
|
7547 |
+
{
|
7548 |
+
"epoch": 0.1465296507459629,
|
7549 |
+
"grad_norm": 0.0007069504936225712,
|
7550 |
+
"learning_rate": 0.00014384760730015364,
|
7551 |
+
"loss": 46.0,
|
7552 |
+
"step": 1073
|
7553 |
+
},
|
7554 |
+
{
|
7555 |
+
"epoch": 0.1466662114642723,
|
7556 |
+
"grad_norm": 0.00026792430435307324,
|
7557 |
+
"learning_rate": 0.00014375315221212357,
|
7558 |
+
"loss": 46.0,
|
7559 |
+
"step": 1074
|
7560 |
+
},
|
7561 |
+
{
|
7562 |
+
"epoch": 0.14680277218258167,
|
7563 |
+
"grad_norm": 0.0004024659574497491,
|
7564 |
+
"learning_rate": 0.00014365864882191968,
|
7565 |
+
"loss": 46.0,
|
7566 |
+
"step": 1075
|
7567 |
+
},
|
7568 |
+
{
|
7569 |
+
"epoch": 0.14693933290089106,
|
7570 |
+
"grad_norm": 0.0010856115259230137,
|
7571 |
+
"learning_rate": 0.0001435640972338709,
|
7572 |
+
"loss": 46.0,
|
7573 |
+
"step": 1076
|
7574 |
+
},
|
7575 |
+
{
|
7576 |
+
"epoch": 0.14707589361920043,
|
7577 |
+
"grad_norm": 0.0007027225801721215,
|
7578 |
+
"learning_rate": 0.00014346949755235944,
|
7579 |
+
"loss": 46.0,
|
7580 |
+
"step": 1077
|
7581 |
+
},
|
7582 |
+
{
|
7583 |
+
"epoch": 0.14721245433750982,
|
7584 |
+
"grad_norm": 0.0004892551223747432,
|
7585 |
+
"learning_rate": 0.00014337484988182042,
|
7586 |
+
"loss": 46.0,
|
7587 |
+
"step": 1078
|
7588 |
+
},
|
7589 |
+
{
|
7590 |
+
"epoch": 0.14734901505581918,
|
7591 |
+
"grad_norm": 0.0004152679175604135,
|
7592 |
+
"learning_rate": 0.00014328015432674214,
|
7593 |
+
"loss": 46.0,
|
7594 |
+
"step": 1079
|
7595 |
+
},
|
7596 |
+
{
|
7597 |
+
"epoch": 0.14748557577412857,
|
7598 |
+
"grad_norm": 0.0007273554219864309,
|
7599 |
+
"learning_rate": 0.00014318541099166555,
|
7600 |
+
"loss": 46.0,
|
7601 |
+
"step": 1080
|
7602 |
+
},
|
7603 |
+
{
|
7604 |
+
"epoch": 0.14762213649243794,
|
7605 |
+
"grad_norm": 0.0013073707232251763,
|
7606 |
+
"learning_rate": 0.00014309061998118454,
|
7607 |
+
"loss": 46.0,
|
7608 |
+
"step": 1081
|
7609 |
+
},
|
7610 |
+
{
|
7611 |
+
"epoch": 0.14775869721074733,
|
7612 |
+
"grad_norm": 0.0008130230708047748,
|
7613 |
+
"learning_rate": 0.00014299578139994557,
|
7614 |
+
"loss": 46.0,
|
7615 |
+
"step": 1082
|
7616 |
+
},
|
7617 |
+
{
|
7618 |
+
"epoch": 0.1478952579290567,
|
7619 |
+
"grad_norm": 0.0018470624927431345,
|
7620 |
+
"learning_rate": 0.00014290089535264755,
|
7621 |
+
"loss": 46.0,
|
7622 |
+
"step": 1083
|
7623 |
+
},
|
7624 |
+
{
|
7625 |
+
"epoch": 0.1480318186473661,
|
7626 |
+
"grad_norm": 0.00037394886021502316,
|
7627 |
+
"learning_rate": 0.0001428059619440419,
|
7628 |
+
"loss": 46.0,
|
7629 |
+
"step": 1084
|
7630 |
+
},
|
7631 |
+
{
|
7632 |
+
"epoch": 0.14816837936567545,
|
7633 |
+
"grad_norm": 0.00034777220571413636,
|
7634 |
+
"learning_rate": 0.00014271098127893218,
|
7635 |
+
"loss": 46.0,
|
7636 |
+
"step": 1085
|
7637 |
+
},
|
7638 |
+
{
|
7639 |
+
"epoch": 0.14830494008398484,
|
7640 |
+
"grad_norm": 0.0007399898604489863,
|
7641 |
+
"learning_rate": 0.0001426159534621743,
|
7642 |
+
"loss": 46.0,
|
7643 |
+
"step": 1086
|
7644 |
+
},
|
7645 |
+
{
|
7646 |
+
"epoch": 0.1484415008022942,
|
7647 |
+
"grad_norm": 0.0009711306192912161,
|
7648 |
+
"learning_rate": 0.00014252087859867608,
|
7649 |
+
"loss": 46.0,
|
7650 |
+
"step": 1087
|
7651 |
+
},
|
7652 |
+
{
|
7653 |
+
"epoch": 0.1485780615206036,
|
7654 |
+
"grad_norm": 0.0075881583616137505,
|
7655 |
+
"learning_rate": 0.00014242575679339738,
|
7656 |
+
"loss": 46.0,
|
7657 |
+
"step": 1088
|
7658 |
+
},
|
7659 |
+
{
|
7660 |
+
"epoch": 0.14871462223891296,
|
7661 |
+
"grad_norm": 0.0006968184607103467,
|
7662 |
+
"learning_rate": 0.00014233058815134978,
|
7663 |
+
"loss": 46.0,
|
7664 |
+
"step": 1089
|
7665 |
+
},
|
7666 |
+
{
|
7667 |
+
"epoch": 0.14885118295722236,
|
7668 |
+
"grad_norm": 0.002149011706933379,
|
7669 |
+
"learning_rate": 0.00014223537277759666,
|
7670 |
+
"loss": 46.0,
|
7671 |
+
"step": 1090
|
7672 |
+
},
|
7673 |
+
{
|
7674 |
+
"epoch": 0.14898774367553172,
|
7675 |
+
"grad_norm": 0.0012700235238298774,
|
7676 |
+
"learning_rate": 0.00014214011077725292,
|
7677 |
+
"loss": 46.0,
|
7678 |
+
"step": 1091
|
7679 |
+
},
|
7680 |
+
{
|
7681 |
+
"epoch": 0.1491243043938411,
|
7682 |
+
"grad_norm": 0.002270018681883812,
|
7683 |
+
"learning_rate": 0.00014204480225548494,
|
7684 |
+
"loss": 46.0,
|
7685 |
+
"step": 1092
|
7686 |
+
},
|
7687 |
+
{
|
7688 |
+
"epoch": 0.1492608651121505,
|
7689 |
+
"grad_norm": 0.0012636389583349228,
|
7690 |
+
"learning_rate": 0.00014194944731751058,
|
7691 |
+
"loss": 46.0,
|
7692 |
+
"step": 1093
|
7693 |
+
},
|
7694 |
+
{
|
7695 |
+
"epoch": 0.14939742583045987,
|
7696 |
+
"grad_norm": 0.0021358972880989313,
|
7697 |
+
"learning_rate": 0.00014185404606859877,
|
7698 |
+
"loss": 46.0,
|
7699 |
+
"step": 1094
|
7700 |
+
},
|
7701 |
+
{
|
7702 |
+
"epoch": 0.14953398654876926,
|
7703 |
+
"grad_norm": 0.0002943683648481965,
|
7704 |
+
"learning_rate": 0.00014175859861406966,
|
7705 |
+
"loss": 46.0,
|
7706 |
+
"step": 1095
|
7707 |
+
},
|
7708 |
+
{
|
7709 |
+
"epoch": 0.14967054726707862,
|
7710 |
+
"grad_norm": 0.0019006587099283934,
|
7711 |
+
"learning_rate": 0.00014166310505929434,
|
7712 |
+
"loss": 46.0,
|
7713 |
+
"step": 1096
|
7714 |
+
},
|
7715 |
+
{
|
7716 |
+
"epoch": 0.14980710798538802,
|
7717 |
+
"grad_norm": 0.0006432710797525942,
|
7718 |
+
"learning_rate": 0.00014156756550969492,
|
7719 |
+
"loss": 46.0,
|
7720 |
+
"step": 1097
|
7721 |
+
},
|
7722 |
+
{
|
7723 |
+
"epoch": 0.14994366870369738,
|
7724 |
+
"grad_norm": 0.0009876599069684744,
|
7725 |
+
"learning_rate": 0.00014147198007074415,
|
7726 |
+
"loss": 46.0,
|
7727 |
+
"step": 1098
|
7728 |
+
},
|
7729 |
+
{
|
7730 |
+
"epoch": 0.15008022942200677,
|
7731 |
+
"grad_norm": 0.0007576916250400245,
|
7732 |
+
"learning_rate": 0.00014137634884796557,
|
7733 |
+
"loss": 46.0,
|
7734 |
+
"step": 1099
|
7735 |
+
},
|
7736 |
+
{
|
7737 |
+
"epoch": 0.15021679014031614,
|
7738 |
+
"grad_norm": 0.001445831498131156,
|
7739 |
+
"learning_rate": 0.00014128067194693316,
|
7740 |
+
"loss": 46.0,
|
7741 |
+
"step": 1100
|
7742 |
+
},
|
7743 |
+
{
|
7744 |
+
"epoch": 0.15035335085862553,
|
7745 |
+
"grad_norm": 0.0003806152381002903,
|
7746 |
+
"learning_rate": 0.0001411849494732713,
|
7747 |
+
"loss": 46.0,
|
7748 |
+
"step": 1101
|
7749 |
+
},
|
7750 |
+
{
|
7751 |
+
"epoch": 0.1504899115769349,
|
7752 |
+
"grad_norm": 0.0004383942286949605,
|
7753 |
+
"learning_rate": 0.00014108918153265485,
|
7754 |
+
"loss": 46.0,
|
7755 |
+
"step": 1102
|
7756 |
+
},
|
7757 |
+
{
|
7758 |
+
"epoch": 0.15062647229524428,
|
7759 |
+
"grad_norm": 0.0023259969893842936,
|
7760 |
+
"learning_rate": 0.00014099336823080865,
|
7761 |
+
"loss": 46.0,
|
7762 |
+
"step": 1103
|
7763 |
+
},
|
7764 |
+
{
|
7765 |
+
"epoch": 0.15076303301355365,
|
7766 |
+
"grad_norm": 0.00025817190180532634,
|
7767 |
+
"learning_rate": 0.00014089750967350781,
|
7768 |
+
"loss": 46.0,
|
7769 |
+
"step": 1104
|
7770 |
+
},
|
7771 |
+
{
|
7772 |
+
"epoch": 0.15089959373186304,
|
7773 |
+
"grad_norm": 0.0005795572069473565,
|
7774 |
+
"learning_rate": 0.0001408016059665773,
|
7775 |
+
"loss": 46.0,
|
7776 |
+
"step": 1105
|
7777 |
+
},
|
7778 |
+
{
|
7779 |
+
"epoch": 0.1510361544501724,
|
7780 |
+
"grad_norm": 0.00029461763915605843,
|
7781 |
+
"learning_rate": 0.00014070565721589195,
|
7782 |
+
"loss": 46.0,
|
7783 |
+
"step": 1106
|
7784 |
+
},
|
7785 |
+
{
|
7786 |
+
"epoch": 0.1511727151684818,
|
7787 |
+
"grad_norm": 0.00029116563382558525,
|
7788 |
+
"learning_rate": 0.00014060966352737628,
|
7789 |
+
"loss": 46.0,
|
7790 |
+
"step": 1107
|
7791 |
+
},
|
7792 |
+
{
|
7793 |
+
"epoch": 0.15130927588679116,
|
7794 |
+
"grad_norm": 0.0006152401329018176,
|
7795 |
+
"learning_rate": 0.00014051362500700447,
|
7796 |
+
"loss": 46.0,
|
7797 |
+
"step": 1108
|
7798 |
+
},
|
7799 |
+
{
|
7800 |
+
"epoch": 0.15144583660510055,
|
7801 |
+
"grad_norm": 0.000545252813026309,
|
7802 |
+
"learning_rate": 0.00014041754176080017,
|
7803 |
+
"loss": 46.0,
|
7804 |
+
"step": 1109
|
7805 |
+
},
|
7806 |
+
{
|
7807 |
+
"epoch": 0.15158239732340992,
|
7808 |
+
"grad_norm": 0.0009326356812380254,
|
7809 |
+
"learning_rate": 0.00014032141389483648,
|
7810 |
+
"loss": 46.0,
|
7811 |
+
"step": 1110
|
7812 |
+
},
|
7813 |
+
{
|
7814 |
+
"epoch": 0.1517189580417193,
|
7815 |
+
"grad_norm": 0.0003035578120034188,
|
7816 |
+
"learning_rate": 0.00014022524151523563,
|
7817 |
+
"loss": 46.0,
|
7818 |
+
"step": 1111
|
7819 |
+
},
|
7820 |
+
{
|
7821 |
+
"epoch": 0.15185551876002867,
|
7822 |
+
"grad_norm": 0.0002604456967674196,
|
7823 |
+
"learning_rate": 0.00014012902472816907,
|
7824 |
+
"loss": 46.0,
|
7825 |
+
"step": 1112
|
7826 |
+
},
|
7827 |
+
{
|
7828 |
+
"epoch": 0.15199207947833807,
|
7829 |
+
"grad_norm": 0.001296610222198069,
|
7830 |
+
"learning_rate": 0.00014003276363985727,
|
7831 |
+
"loss": 46.0,
|
7832 |
+
"step": 1113
|
7833 |
+
},
|
7834 |
+
{
|
7835 |
+
"epoch": 0.15212864019664743,
|
7836 |
+
"grad_norm": 0.0007714428938925266,
|
7837 |
+
"learning_rate": 0.00013993645835656953,
|
7838 |
+
"loss": 46.0,
|
7839 |
+
"step": 1114
|
7840 |
+
},
|
7841 |
+
{
|
7842 |
+
"epoch": 0.15226520091495682,
|
7843 |
+
"grad_norm": 0.000851266726385802,
|
7844 |
+
"learning_rate": 0.00013984010898462416,
|
7845 |
+
"loss": 46.0,
|
7846 |
+
"step": 1115
|
7847 |
+
},
|
7848 |
+
{
|
7849 |
+
"epoch": 0.1524017616332662,
|
7850 |
+
"grad_norm": 0.000391008477890864,
|
7851 |
+
"learning_rate": 0.00013974371563038785,
|
7852 |
+
"loss": 46.0,
|
7853 |
+
"step": 1116
|
7854 |
+
},
|
7855 |
+
{
|
7856 |
+
"epoch": 0.15253832235157558,
|
7857 |
+
"grad_norm": 0.0006454547983594239,
|
7858 |
+
"learning_rate": 0.00013964727840027604,
|
7859 |
+
"loss": 46.0,
|
7860 |
+
"step": 1117
|
7861 |
+
},
|
7862 |
+
{
|
7863 |
+
"epoch": 0.15267488306988494,
|
7864 |
+
"grad_norm": 0.0010403237538412213,
|
7865 |
+
"learning_rate": 0.00013955079740075256,
|
7866 |
+
"loss": 46.0,
|
7867 |
+
"step": 1118
|
7868 |
+
},
|
7869 |
+
{
|
7870 |
+
"epoch": 0.15281144378819433,
|
7871 |
+
"grad_norm": 0.0002263033966301009,
|
7872 |
+
"learning_rate": 0.00013945427273832954,
|
7873 |
+
"loss": 46.0,
|
7874 |
+
"step": 1119
|
7875 |
+
},
|
7876 |
+
{
|
7877 |
+
"epoch": 0.1529480045065037,
|
7878 |
+
"grad_norm": 0.0009018364362418652,
|
7879 |
+
"learning_rate": 0.0001393577045195673,
|
7880 |
+
"loss": 46.0,
|
7881 |
+
"step": 1120
|
7882 |
+
},
|
7883 |
+
+    {
+      "epoch": 0.1530845652248131,
+      "grad_norm": 0.00043761078268289566,
+      "learning_rate": 0.0001392610928510743,
+      "loss": 46.0,
+      "step": 1121
+    },
+    {
+      "epoch": 0.15322112594312245,
+      "grad_norm": 0.0007157377549447119,
+      "learning_rate": 0.00013916443783950694,
+      "loss": 46.0,
+      "step": 1122
+    },
+    {
+      "epoch": 0.15335768666143185,
+      "grad_norm": 0.0011558537371456623,
+      "learning_rate": 0.00013906773959156948,
+      "loss": 46.0,
+      "step": 1123
+    },
+    {
+      "epoch": 0.1534942473797412,
+      "grad_norm": 0.0006105789798311889,
+      "learning_rate": 0.00013897099821401384,
+      "loss": 46.0,
+      "step": 1124
+    },
+    {
+      "epoch": 0.1536308080980506,
+      "grad_norm": 0.00031421618768945336,
+      "learning_rate": 0.00013887421381363968,
+      "loss": 46.0,
+      "step": 1125
+    },
+    {
+      "epoch": 0.15376736881635997,
+      "grad_norm": 0.00037318936665542424,
+      "learning_rate": 0.00013877738649729405,
+      "loss": 46.0,
+      "step": 1126
+    },
+    {
+      "epoch": 0.15390392953466936,
+      "grad_norm": 0.0004407285014167428,
+      "learning_rate": 0.00013868051637187144,
+      "loss": 46.0,
+      "step": 1127
+    },
+    {
+      "epoch": 0.15404049025297872,
+      "grad_norm": 0.0005434497143141925,
+      "learning_rate": 0.00013858360354431355,
+      "loss": 46.0,
+      "step": 1128
+    },
+    {
+      "epoch": 0.15417705097128812,
+      "grad_norm": 0.0009806392481550574,
+      "learning_rate": 0.00013848664812160925,
+      "loss": 46.0,
+      "step": 1129
+    },
+    {
+      "epoch": 0.15431361168959748,
+      "grad_norm": 0.0007962798699736595,
+      "learning_rate": 0.00013838965021079446,
+      "loss": 46.0,
+      "step": 1130
+    },
+    {
+      "epoch": 0.15445017240790687,
+      "grad_norm": 0.001249475870281458,
+      "learning_rate": 0.00013829260991895197,
+      "loss": 46.0,
+      "step": 1131
+    },
+    {
+      "epoch": 0.15458673312621624,
+      "grad_norm": 0.001490502618253231,
+      "learning_rate": 0.00013819552735321134,
+      "loss": 46.0,
+      "step": 1132
+    },
+    {
+      "epoch": 0.15472329384452563,
+      "grad_norm": 0.0006211231811903417,
+      "learning_rate": 0.00013809840262074885,
+      "loss": 46.0,
+      "step": 1133
+    },
+    {
+      "epoch": 0.154859854562835,
+      "grad_norm": 0.0006204847013577819,
+      "learning_rate": 0.0001380012358287873,
+      "loss": 46.0,
+      "step": 1134
+    },
+    {
+      "epoch": 0.15499641528114438,
+      "grad_norm": 0.00036665357765741646,
+      "learning_rate": 0.0001379040270845959,
+      "loss": 46.0,
+      "step": 1135
+    },
+    {
+      "epoch": 0.15513297599945375,
+      "grad_norm": 0.0005352575681172311,
+      "learning_rate": 0.00013780677649549025,
+      "loss": 46.0,
+      "step": 1136
+    },
+    {
+      "epoch": 0.15526953671776314,
+      "grad_norm": 0.0004583641130011529,
+      "learning_rate": 0.00013770948416883205,
+      "loss": 46.0,
+      "step": 1137
+    },
+    {
+      "epoch": 0.1554060974360725,
+      "grad_norm": 0.0007949661812745035,
+      "learning_rate": 0.00013761215021202916,
+      "loss": 46.0,
+      "step": 1138
+    },
+    {
+      "epoch": 0.1555426581543819,
+      "grad_norm": 0.0009157547610811889,
+      "learning_rate": 0.00013751477473253533,
+      "loss": 46.0,
+      "step": 1139
+    },
+    {
+      "epoch": 0.15567921887269126,
+      "grad_norm": 0.0013030368136242032,
+      "learning_rate": 0.0001374173578378502,
+      "loss": 46.0,
+      "step": 1140
+    },
+    {
+      "epoch": 0.15581577959100065,
+      "grad_norm": 0.004025363363325596,
+      "learning_rate": 0.00013731989963551913,
+      "loss": 46.0,
+      "step": 1141
+    },
+    {
+      "epoch": 0.15595234030931002,
+      "grad_norm": 0.003963345196098089,
+      "learning_rate": 0.00013722240023313306,
+      "loss": 46.0,
+      "step": 1142
+    },
+    {
+      "epoch": 0.1560889010276194,
+      "grad_norm": 0.00040174927562475204,
+      "learning_rate": 0.00013712485973832838,
+      "loss": 46.0,
+      "step": 1143
+    },
+    {
+      "epoch": 0.15622546174592877,
+      "grad_norm": 0.0013893769355490804,
+      "learning_rate": 0.00013702727825878693,
+      "loss": 46.0,
+      "step": 1144
+    },
+    {
+      "epoch": 0.15636202246423817,
+      "grad_norm": 0.0008682573097757995,
+      "learning_rate": 0.00013692965590223573,
+      "loss": 46.0,
+      "step": 1145
+    },
+    {
+      "epoch": 0.15649858318254753,
+      "grad_norm": 0.0005177369457669556,
+      "learning_rate": 0.00013683199277644693,
+      "loss": 46.0,
+      "step": 1146
+    },
+    {
+      "epoch": 0.15663514390085692,
+      "grad_norm": 0.0004120862577110529,
+      "learning_rate": 0.00013673428898923774,
+      "loss": 46.0,
+      "step": 1147
+    },
+    {
+      "epoch": 0.15677170461916629,
+      "grad_norm": 0.00036879064282402396,
+      "learning_rate": 0.00013663654464847022,
+      "loss": 46.0,
+      "step": 1148
+    },
+    {
+      "epoch": 0.15690826533747568,
+      "grad_norm": 0.0005929148173891008,
+      "learning_rate": 0.0001365387598620512,
+      "loss": 46.0,
+      "step": 1149
+    },
+    {
+      "epoch": 0.15704482605578504,
+      "grad_norm": 0.0009942365577444434,
+      "learning_rate": 0.00013644093473793215,
+      "loss": 46.0,
+      "step": 1150
+    },
+    {
+      "epoch": 0.15718138677409443,
+      "grad_norm": 0.0005243360064923763,
+      "learning_rate": 0.00013634306938410911,
+      "loss": 46.0,
+      "step": 1151
+    },
+    {
+      "epoch": 0.1573179474924038,
+      "grad_norm": 0.0006608903058804572,
+      "learning_rate": 0.00013624516390862244,
+      "loss": 46.0,
+      "step": 1152
+    },
+    {
+      "epoch": 0.1574545082107132,
+      "grad_norm": 0.0007105504628270864,
+      "learning_rate": 0.00013614721841955692,
+      "loss": 46.0,
+      "step": 1153
+    },
+    {
+      "epoch": 0.15759106892902255,
+      "grad_norm": 0.00046774803195148706,
+      "learning_rate": 0.00013604923302504147,
+      "loss": 46.0,
+      "step": 1154
+    },
+    {
+      "epoch": 0.15772762964733195,
+      "grad_norm": 0.0005379539215937257,
+      "learning_rate": 0.00013595120783324902,
+      "loss": 46.0,
+      "step": 1155
+    },
+    {
+      "epoch": 0.1578641903656413,
+      "grad_norm": 0.0004264324379619211,
+      "learning_rate": 0.00013585314295239644,
+      "loss": 46.0,
+      "step": 1156
+    },
+    {
+      "epoch": 0.1580007510839507,
+      "grad_norm": 0.000680508092045784,
+      "learning_rate": 0.00013575503849074444,
+      "loss": 46.0,
+      "step": 1157
+    },
+    {
+      "epoch": 0.15813731180226007,
+      "grad_norm": 0.0005256118020042777,
+      "learning_rate": 0.0001356568945565974,
+      "loss": 46.0,
+      "step": 1158
+    },
+    {
+      "epoch": 0.15827387252056946,
+      "grad_norm": 0.001108917291276157,
+      "learning_rate": 0.0001355587112583033,
+      "loss": 46.0,
+      "step": 1159
+    },
+    {
+      "epoch": 0.15841043323887882,
+      "grad_norm": 0.0004504511889535934,
+      "learning_rate": 0.00013546048870425356,
+      "loss": 46.0,
+      "step": 1160
+    },
+    {
+      "epoch": 0.15854699395718821,
+      "grad_norm": 0.001876338617876172,
+      "learning_rate": 0.00013536222700288303,
+      "loss": 46.0,
+      "step": 1161
+    },
+    {
+      "epoch": 0.1586835546754976,
+      "grad_norm": 0.0005848141154274344,
+      "learning_rate": 0.00013526392626266956,
+      "loss": 46.0,
+      "step": 1162
+    },
+    {
+      "epoch": 0.15882011539380697,
+      "grad_norm": 0.000582265027333051,
+      "learning_rate": 0.00013516558659213432,
+      "loss": 46.0,
+      "step": 1163
+    },
+    {
+      "epoch": 0.15895667611211636,
+      "grad_norm": 0.00048727478133514524,
+      "learning_rate": 0.00013506720809984137,
+      "loss": 46.0,
+      "step": 1164
+    },
+    {
+      "epoch": 0.15909323683042573,
+      "grad_norm": 0.0004053419688716531,
+      "learning_rate": 0.0001349687908943976,
+      "loss": 46.0,
+      "step": 1165
+    },
+    {
+      "epoch": 0.15922979754873512,
+      "grad_norm": 0.0012138515012338758,
+      "learning_rate": 0.0001348703350844527,
+      "loss": 46.0,
+      "step": 1166
+    },
+    {
+      "epoch": 0.15936635826704448,
+      "grad_norm": 0.00035313217085786164,
+      "learning_rate": 0.00013477184077869892,
+      "loss": 46.0,
+      "step": 1167
+    },
+    {
+      "epoch": 0.15950291898535388,
+      "grad_norm": 0.0007212344789877534,
+      "learning_rate": 0.000134673308085871,
+      "loss": 46.0,
+      "step": 1168
+    },
+    {
+      "epoch": 0.15963947970366324,
+      "grad_norm": 0.0009634968009777367,
+      "learning_rate": 0.0001345747371147461,
+      "loss": 46.0,
+      "step": 1169
+    },
+    {
+      "epoch": 0.15977604042197263,
+      "grad_norm": 0.0004901404026895761,
+      "learning_rate": 0.0001344761279741437,
+      "loss": 46.0,
+      "step": 1170
+    },
+    {
+      "epoch": 0.159912601140282,
+      "grad_norm": 0.0008185001206584275,
+      "learning_rate": 0.0001343774807729253,
+      "loss": 46.0,
+      "step": 1171
+    },
+    {
+      "epoch": 0.1600491618585914,
+      "grad_norm": 0.0004916120087727904,
+      "learning_rate": 0.0001342787956199945,
+      "loss": 46.0,
+      "step": 1172
+    },
+    {
+      "epoch": 0.16018572257690075,
+      "grad_norm": 0.00023088046873454005,
+      "learning_rate": 0.00013418007262429668,
+      "loss": 46.0,
+      "step": 1173
+    },
+    {
+      "epoch": 0.16032228329521014,
+      "grad_norm": 0.00026484086993150413,
+      "learning_rate": 0.00013408131189481911,
+      "loss": 46.0,
+      "step": 1174
+    },
+    {
+      "epoch": 0.1604588440135195,
+      "grad_norm": 0.044743701815605164,
+      "learning_rate": 0.00013398251354059077,
+      "loss": 46.0,
+      "step": 1175
+    },
+    {
+      "epoch": 0.1605954047318289,
+      "grad_norm": 0.0016944644739851356,
+      "learning_rate": 0.000133883677670682,
+      "loss": 46.0,
+      "step": 1176
+    },
+    {
+      "epoch": 0.16073196545013826,
+      "grad_norm": 0.0002779974602162838,
+      "learning_rate": 0.0001337848043942047,
+      "loss": 46.0,
+      "step": 1177
+    },
+    {
+      "epoch": 0.16086852616844766,
+      "grad_norm": 0.003036403562873602,
+      "learning_rate": 0.00013368589382031196,
+      "loss": 46.0,
+      "step": 1178
+    },
+    {
+      "epoch": 0.16100508688675702,
+      "grad_norm": 0.000545320101082325,
+      "learning_rate": 0.00013358694605819814,
+      "loss": 46.0,
+      "step": 1179
+    },
+    {
+      "epoch": 0.1611416476050664,
+      "grad_norm": 0.0005560341523960233,
+      "learning_rate": 0.00013348796121709862,
+      "loss": 46.0,
+      "step": 1180
+    },
+    {
+      "epoch": 0.16127820832337578,
+      "grad_norm": 0.000386113504646346,
+      "learning_rate": 0.00013338893940628973,
+      "loss": 46.0,
+      "step": 1181
+    },
+    {
+      "epoch": 0.16141476904168517,
+      "grad_norm": 0.00037342161522246897,
+      "learning_rate": 0.00013328988073508852,
+      "loss": 46.0,
+      "step": 1182
+    },
+    {
+      "epoch": 0.16155132975999453,
+      "grad_norm": 0.0005269271787256002,
+      "learning_rate": 0.00013319078531285285,
+      "loss": 46.0,
+      "step": 1183
+    },
+    {
+      "epoch": 0.16168789047830393,
+      "grad_norm": 0.0007852707640267909,
+      "learning_rate": 0.00013309165324898112,
+      "loss": 46.0,
+      "step": 1184
+    },
+    {
+      "epoch": 0.1618244511966133,
+      "grad_norm": 0.0008284957148134708,
+      "learning_rate": 0.00013299248465291214,
+      "loss": 46.0,
+      "step": 1185
+    },
+    {
+      "epoch": 0.16196101191492268,
+      "grad_norm": 0.001822237973101437,
+      "learning_rate": 0.00013289327963412513,
+      "loss": 46.0,
+      "step": 1186
+    },
+    {
+      "epoch": 0.16209757263323205,
+      "grad_norm": 0.0009884198661893606,
+      "learning_rate": 0.00013279403830213942,
+      "loss": 46.0,
+      "step": 1187
+    },
+    {
+      "epoch": 0.16223413335154144,
+      "grad_norm": 0.0014371919678524137,
+      "learning_rate": 0.00013269476076651447,
+      "loss": 46.0,
+      "step": 1188
+    },
+    {
+      "epoch": 0.1623706940698508,
+      "grad_norm": 0.0016071486752480268,
+      "learning_rate": 0.00013259544713684974,
+      "loss": 46.0,
+      "step": 1189
+    },
+    {
+      "epoch": 0.1625072547881602,
+      "grad_norm": 0.001745348796248436,
+      "learning_rate": 0.00013249609752278454,
+      "loss": 46.0,
+      "step": 1190
+    },
+    {
+      "epoch": 0.16264381550646956,
+      "grad_norm": 0.0004350421077106148,
+      "learning_rate": 0.0001323967120339978,
+      "loss": 46.0,
+      "step": 1191
+    },
+    {
+      "epoch": 0.16278037622477895,
+      "grad_norm": 0.0009462771704420447,
+      "learning_rate": 0.00013229729078020823,
+      "loss": 46.0,
+      "step": 1192
+    },
+    {
+      "epoch": 0.16291693694308831,
+      "grad_norm": 0.0004913929151371121,
+      "learning_rate": 0.00013219783387117385,
+      "loss": 46.0,
+      "step": 1193
+    },
+    {
+      "epoch": 0.1630534976613977,
+      "grad_norm": 0.0011134218657389283,
+      "learning_rate": 0.00013209834141669213,
+      "loss": 46.0,
+      "step": 1194
+    },
+    {
+      "epoch": 0.16319005837970707,
+      "grad_norm": 0.0005660938913933933,
+      "learning_rate": 0.0001319988135265998,
+      "loss": 46.0,
+      "step": 1195
+    },
+    {
+      "epoch": 0.16332661909801646,
+      "grad_norm": 0.0008015558705665171,
+      "learning_rate": 0.00013189925031077267,
+      "loss": 46.0,
+      "step": 1196
+    },
+    {
+      "epoch": 0.16346317981632583,
+      "grad_norm": 0.001170705771073699,
+      "learning_rate": 0.00013179965187912554,
+      "loss": 46.0,
+      "step": 1197
+    },
+    {
+      "epoch": 0.16359974053463522,
+      "grad_norm": 0.0021776033099740744,
+      "learning_rate": 0.00013170001834161209,
+      "loss": 46.0,
+      "step": 1198
+    },
+    {
+      "epoch": 0.16373630125294458,
+      "grad_norm": 0.0012684693792834878,
+      "learning_rate": 0.0001316003498082248,
+      "loss": 46.0,
+      "step": 1199
+    },
+    {
+      "epoch": 0.16387286197125397,
+      "grad_norm": 0.005284481681883335,
+      "learning_rate": 0.0001315006463889948,
+      "loss": 46.0,
+      "step": 1200
+    },
+    {
+      "epoch": 0.16387286197125397,
+      "eval_loss": 11.5,
+      "eval_runtime": 20.3187,
+      "eval_samples_per_second": 151.781,
+      "eval_steps_per_second": 75.891,
+      "step": 1200
     }
   ],
   "logging_steps": 1,
…
         "early_stopping_threshold": 0.0
       },
       "attributes": {
+        "early_stopping_patience_counter": 3
       }
     },
     "TrainerControl": {
…
         "should_evaluate": false,
         "should_log": false,
         "should_save": true,
+        "should_training_stop": true
       },
       "attributes": {}
     }
   },
+  "total_flos": 42923841650688.0,
   "train_batch_size": 2,
   "trial_name": null,
   "trial_params": null