Upload folder using huggingface_hub
Changed files:
- adapter_model.safetensors +1 -1
- optimizer.pt +1 -1
- rng_state_0.pth +1 -1
- rng_state_1.pth +1 -1
- scheduler.pt +1 -1
- trainer_state.json +3153 -3
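For reference, a commit with this title is normally produced by the huggingface_hub Python API's upload_folder call. A minimal sketch; the local folder path and repo id below are placeholders, not values taken from this repository:

from huggingface_hub import HfApi

api = HfApi()  # authenticate beforehand, e.g. via `huggingface-cli login`
# Upload the whole checkpoint directory as a single commit. Large binaries
# (*.safetensors, *.pt, *.pth) are stored via Git LFS, which is why the
# diffs below only show an "oid sha256:..." pointer line changing.
api.upload_folder(
    folder_path="checkpoint-2400",    # placeholder local path
    repo_id="username/repo-name",     # placeholder repo id
    commit_message="Upload folder using huggingface_hub",
)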
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:418eaa322298bd15617cc6d66b44e9b3fa6ee1bafb7f2820a1f12c5b56eb7df5
 size 3999731048
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d56e469806b3be6b190b2cc19e283c71e41c769a9b33ab3ebfb260167a10e8bc
 size 2886849310
rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4d73250db2dc829782eb0a5cd83f29381507104f3e10cc15651abbdbb2f50f65
 size 14512
rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3c356471dd6c1140a14fb0468b306f6e8b4ab93984ac6f73dee134b5fe355e34
 size 14512
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6cf30fb7dd03bf3ad087862f8caa6268549947d5db04bcd0487c10da0b6f7a9e
 size 1064
trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 0.
+  "epoch": 0.18438844499078058,
   "eval_steps": 500,
-  "global_step":
+  "global_step": 2400,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -5257,6 +5257,3156 @@
       "learning_rate": 1.953902888752305e-05,
       "loss": 1.6367,
       "step": 1500
+    },
+    { "epoch": 0.11539643515673018, "grad_norm": 4.866142272949219, "learning_rate": 1.9538414259373083e-05, "loss": 1.6035, "step": 1502 },
+    { "epoch": 0.11555009219422249, "grad_norm": 3.647397756576538, "learning_rate": 1.953779963122311e-05, "loss": 1.5274, "step": 1504 },
+    { "epoch": 0.11570374923171481, "grad_norm": 3.999390125274658, "learning_rate": 1.9537185003073143e-05, "loss": 1.7818, "step": 1506 },
+    { "epoch": 0.11585740626920712, "grad_norm": 4.787381172180176, "learning_rate": 1.9536570374923172e-05, "loss": 1.7033, "step": 1508 },
+    { "epoch": 0.11601106330669944, "grad_norm": 4.415989398956299, "learning_rate": 1.9535955746773202e-05, "loss": 1.831, "step": 1510 },
+    { "epoch": 0.11616472034419176, "grad_norm": 4.548354148864746, "learning_rate": 1.9535341118623235e-05, "loss": 1.5145, "step": 1512 },
+    { "epoch": 0.11631837738168409, "grad_norm": 5.4493560791015625, "learning_rate": 1.9534726490473265e-05, "loss": 1.876, "step": 1514 },
+    { "epoch": 0.1164720344191764, "grad_norm": 3.9988834857940674, "learning_rate": 1.9534111862323295e-05, "loss": 1.6325, "step": 1516 },
+    { "epoch": 0.11662569145666872, "grad_norm": 4.861139297485352, "learning_rate": 1.9533497234173328e-05, "loss": 1.6743, "step": 1518 },
+    { "epoch": 0.11677934849416104, "grad_norm": 5.388833522796631, "learning_rate": 1.9532882606023357e-05, "loss": 1.5854, "step": 1520 },
+    { "epoch": 0.11693300553165335, "grad_norm": 4.772726058959961, "learning_rate": 1.953226797787339e-05, "loss": 1.6481, "step": 1522 },
+    { "epoch": 0.11708666256914567, "grad_norm": 4.285337924957275, "learning_rate": 1.9531653349723417e-05, "loss": 1.7054, "step": 1524 },
+    { "epoch": 0.11724031960663799, "grad_norm": 4.5872626304626465, "learning_rate": 1.953103872157345e-05, "loss": 1.6569, "step": 1526 },
+    { "epoch": 0.1173939766441303, "grad_norm": 4.3280463218688965, "learning_rate": 1.9530424093423483e-05, "loss": 1.5277, "step": 1528 },
+    { "epoch": 0.11754763368162262, "grad_norm": 4.480382919311523, "learning_rate": 1.952980946527351e-05, "loss": 1.7289, "step": 1530 },
+    { "epoch": 0.11770129071911493, "grad_norm": 4.207196235656738, "learning_rate": 1.9529194837123542e-05, "loss": 1.803, "step": 1532 },
+    { "epoch": 0.11785494775660725, "grad_norm": 4.125123023986816, "learning_rate": 1.9528580208973572e-05, "loss": 1.6289, "step": 1534 },
+    { "epoch": 0.11800860479409957, "grad_norm": 6.329103469848633, "learning_rate": 1.95279655808236e-05, "loss": 1.6592, "step": 1536 },
+    { "epoch": 0.11816226183159188, "grad_norm": 4.436602592468262, "learning_rate": 1.9527350952673635e-05, "loss": 1.5904, "step": 1538 },
+    { "epoch": 0.1183159188690842, "grad_norm": 4.564888954162598, "learning_rate": 1.9526736324523664e-05, "loss": 1.7565, "step": 1540 },
+    { "epoch": 0.11846957590657652, "grad_norm": 4.3771514892578125, "learning_rate": 1.9526121696373697e-05, "loss": 1.6746, "step": 1542 },
+    { "epoch": 0.11862323294406883, "grad_norm": 4.449161529541016, "learning_rate": 1.9525507068223727e-05, "loss": 1.6377, "step": 1544 },
+    { "epoch": 0.11877688998156116, "grad_norm": 4.770364761352539, "learning_rate": 1.9524892440073757e-05, "loss": 1.5953, "step": 1546 },
+    { "epoch": 0.11893054701905348, "grad_norm": 4.0749640464782715, "learning_rate": 1.952427781192379e-05, "loss": 1.5511, "step": 1548 },
+    { "epoch": 0.1190842040565458, "grad_norm": 4.361663341522217, "learning_rate": 1.9523663183773816e-05, "loss": 1.5562, "step": 1550 },
+    { "epoch": 0.11923786109403811, "grad_norm": 4.269155025482178, "learning_rate": 1.952304855562385e-05, "loss": 1.5725, "step": 1552 },
+    { "epoch": 0.11939151813153043, "grad_norm": 4.128551483154297, "learning_rate": 1.952243392747388e-05, "loss": 1.5, "step": 1554 },
+    { "epoch": 0.11954517516902274, "grad_norm": 4.763240814208984, "learning_rate": 1.952181929932391e-05, "loss": 1.7572, "step": 1556 },
+    { "epoch": 0.11969883220651506, "grad_norm": 4.871914386749268, "learning_rate": 1.952120467117394e-05, "loss": 1.609, "step": 1558 },
+    { "epoch": 0.11985248924400738, "grad_norm": 4.267725467681885, "learning_rate": 1.952059004302397e-05, "loss": 1.5832, "step": 1560 },
+    { "epoch": 0.12000614628149969, "grad_norm": 4.569482326507568, "learning_rate": 1.9519975414874e-05, "loss": 1.6262, "step": 1562 },
+    { "epoch": 0.12015980331899201, "grad_norm": 4.285094261169434, "learning_rate": 1.9519360786724034e-05, "loss": 1.7479, "step": 1564 },
+    { "epoch": 0.12031346035648433, "grad_norm": 4.529351234436035, "learning_rate": 1.9518746158574064e-05, "loss": 1.6297, "step": 1566 },
+    { "epoch": 0.12046711739397664, "grad_norm": 4.966389179229736, "learning_rate": 1.9518131530424097e-05, "loss": 1.7544, "step": 1568 },
+    { "epoch": 0.12062077443146896, "grad_norm": 4.608340263366699, "learning_rate": 1.9517516902274127e-05, "loss": 1.4635, "step": 1570 },
+    { "epoch": 0.12077443146896127, "grad_norm": 3.8790552616119385, "learning_rate": 1.9516902274124156e-05, "loss": 1.6345, "step": 1572 },
+    { "epoch": 0.12092808850645359, "grad_norm": 5.229369163513184, "learning_rate": 1.951628764597419e-05, "loss": 1.6829, "step": 1574 },
+    { "epoch": 0.1210817455439459, "grad_norm": 4.269663333892822, "learning_rate": 1.9515673017824216e-05, "loss": 1.568, "step": 1576 },
+    { "epoch": 0.12123540258143822, "grad_norm": 4.905238151550293, "learning_rate": 1.951505838967425e-05, "loss": 1.671, "step": 1578 },
+    { "epoch": 0.12138905961893055, "grad_norm": 4.5513596534729, "learning_rate": 1.951444376152428e-05, "loss": 1.6636, "step": 1580 },
+    { "epoch": 0.12154271665642287, "grad_norm": 4.586058616638184, "learning_rate": 1.9513829133374308e-05, "loss": 1.7669, "step": 1582 },
+    { "epoch": 0.12169637369391519, "grad_norm": 5.4855170249938965, "learning_rate": 1.951321450522434e-05, "loss": 1.5033, "step": 1584 },
+    { "epoch": 0.1218500307314075, "grad_norm": 4.668776035308838, "learning_rate": 1.951259987707437e-05, "loss": 1.5859, "step": 1586 },
+    { "epoch": 0.12200368776889982, "grad_norm": 3.9210376739501953, "learning_rate": 1.9511985248924404e-05, "loss": 1.5757, "step": 1588 },
+    { "epoch": 0.12215734480639213, "grad_norm": 4.558568000793457, "learning_rate": 1.9511370620774434e-05, "loss": 1.5945, "step": 1590 },
+    { "epoch": 0.12231100184388445, "grad_norm": 4.247246265411377, "learning_rate": 1.9510755992624463e-05, "loss": 1.624, "step": 1592 },
+    { "epoch": 0.12246465888137677, "grad_norm": 4.2471604347229, "learning_rate": 1.9510141364474496e-05, "loss": 1.5873, "step": 1594 },
+    { "epoch": 0.12261831591886908, "grad_norm": 4.362886428833008, "learning_rate": 1.9509526736324526e-05, "loss": 1.7448, "step": 1596 },
+    { "epoch": 0.1227719729563614, "grad_norm": 5.111678123474121, "learning_rate": 1.9508912108174556e-05, "loss": 1.8134, "step": 1598 },
+    { "epoch": 0.12292562999385372, "grad_norm": 4.4582624435424805, "learning_rate": 1.950829748002459e-05, "loss": 1.7155, "step": 1600 },
+    { "epoch": 0.12307928703134603, "grad_norm": 3.796780586242676, "learning_rate": 1.9507682851874615e-05, "loss": 1.5636, "step": 1602 },
+    { "epoch": 0.12323294406883835, "grad_norm": 4.517824649810791, "learning_rate": 1.9507068223724648e-05, "loss": 1.6092, "step": 1604 },
+    { "epoch": 0.12338660110633067, "grad_norm": 4.659684181213379, "learning_rate": 1.9506453595574678e-05, "loss": 1.641, "step": 1606 },
+    { "epoch": 0.12354025814382298, "grad_norm": 4.470782279968262, "learning_rate": 1.950583896742471e-05, "loss": 1.618, "step": 1608 },
+    { "epoch": 0.1236939151813153, "grad_norm": 4.486400604248047, "learning_rate": 1.950522433927474e-05, "loss": 1.6912, "step": 1610 },
+    { "epoch": 0.12384757221880763, "grad_norm": 4.459258556365967, "learning_rate": 1.950460971112477e-05, "loss": 1.5627, "step": 1612 },
+    { "epoch": 0.12400122925629994, "grad_norm": 4.486885070800781, "learning_rate": 1.9503995082974803e-05, "loss": 1.8642, "step": 1614 },
+    { "epoch": 0.12415488629379226, "grad_norm": 4.576472282409668, "learning_rate": 1.9503380454824833e-05, "loss": 1.6411, "step": 1616 },
+    { "epoch": 0.12430854333128458, "grad_norm": 4.349391460418701, "learning_rate": 1.9502765826674863e-05, "loss": 1.6382, "step": 1618 },
+    { "epoch": 0.12446220036877689, "grad_norm": 4.264526844024658, "learning_rate": 1.9502151198524896e-05, "loss": 1.621, "step": 1620 },
+    { "epoch": 0.12461585740626921, "grad_norm": 4.798770904541016, "learning_rate": 1.9501536570374925e-05, "loss": 1.8124, "step": 1622 },
+    { "epoch": 0.12476951444376153, "grad_norm": 3.747992515563965, "learning_rate": 1.9500921942224955e-05, "loss": 1.516, "step": 1624 },
+    { "epoch": 0.12492317148125384, "grad_norm": 4.410411834716797, "learning_rate": 1.9500307314074988e-05, "loss": 1.5645, "step": 1626 },
+    { "epoch": 0.12507682851874616, "grad_norm": 4.139060020446777, "learning_rate": 1.9499692685925018e-05, "loss": 1.6217, "step": 1628 },
+    { "epoch": 0.12523048555623847, "grad_norm": 4.380125045776367, "learning_rate": 1.9499078057775048e-05, "loss": 1.6909, "step": 1630 },
+    { "epoch": 0.1253841425937308, "grad_norm": 4.449796676635742, "learning_rate": 1.9498463429625077e-05, "loss": 1.7215, "step": 1632 },
+    { "epoch": 0.1255377996312231, "grad_norm": 4.043376922607422, "learning_rate": 1.949784880147511e-05, "loss": 1.6326, "step": 1634 },
+    { "epoch": 0.12569145666871542, "grad_norm": 4.427875518798828, "learning_rate": 1.949723417332514e-05, "loss": 1.6962, "step": 1636 },
+    { "epoch": 0.12584511370620774, "grad_norm": 4.617554187774658, "learning_rate": 1.949661954517517e-05, "loss": 1.5711, "step": 1638 },
+    { "epoch": 0.12599877074370006, "grad_norm": 4.245482444763184, "learning_rate": 1.9496004917025203e-05, "loss": 1.6479, "step": 1640 },
+    { "epoch": 0.12615242778119237, "grad_norm": 4.876771926879883, "learning_rate": 1.9495390288875232e-05, "loss": 1.7238, "step": 1642 },
+    { "epoch": 0.1263060848186847, "grad_norm": 4.263737678527832, "learning_rate": 1.9494775660725262e-05, "loss": 1.5666, "step": 1644 },
+    { "epoch": 0.126459741856177, "grad_norm": 6.202945232391357, "learning_rate": 1.9494161032575295e-05, "loss": 1.6217, "step": 1646 },
+    { "epoch": 0.12661339889366932, "grad_norm": 4.307828426361084, "learning_rate": 1.9493546404425325e-05, "loss": 1.49, "step": 1648 },
+    { "epoch": 0.12676705593116164, "grad_norm": 4.122886657714844, "learning_rate": 1.9492931776275355e-05, "loss": 1.7121, "step": 1650 },
+    { "epoch": 0.12692071296865395, "grad_norm": 4.3632426261901855, "learning_rate": 1.9492317148125384e-05, "loss": 1.6835, "step": 1652 },
+    { "epoch": 0.12707437000614627, "grad_norm": 4.4186625480651855, "learning_rate": 1.9491702519975417e-05, "loss": 1.7579, "step": 1654 },
+    { "epoch": 0.1272280270436386, "grad_norm": 4.411682605743408, "learning_rate": 1.9491087891825447e-05, "loss": 1.5771, "step": 1656 },
+    { "epoch": 0.1273816840811309, "grad_norm": 4.259854316711426, "learning_rate": 1.9490473263675477e-05, "loss": 1.6239, "step": 1658 },
+    { "epoch": 0.12753534111862325, "grad_norm": 4.225386619567871, "learning_rate": 1.948985863552551e-05, "loss": 1.6777, "step": 1660 },
+    { "epoch": 0.12768899815611556, "grad_norm": 4.977676868438721, "learning_rate": 1.948924400737554e-05, "loss": 1.6166, "step": 1662 },
+    { "epoch": 0.12784265519360788, "grad_norm": 3.7306509017944336, "learning_rate": 1.948862937922557e-05, "loss": 1.5834, "step": 1664 },
+    { "epoch": 0.1279963122311002, "grad_norm": 4.451853275299072, "learning_rate": 1.9488014751075602e-05, "loss": 1.6464, "step": 1666 },
+    { "epoch": 0.1281499692685925, "grad_norm": 4.641234397888184, "learning_rate": 1.9487400122925632e-05, "loss": 1.6698, "step": 1668 },
+    { "epoch": 0.12830362630608483, "grad_norm": 5.218206882476807, "learning_rate": 1.948678549477566e-05, "loss": 1.6614, "step": 1670 },
+    { "epoch": 0.12845728334357714, "grad_norm": 4.623648166656494, "learning_rate": 1.9486170866625695e-05, "loss": 1.6586, "step": 1672 },
+    { "epoch": 0.12861094038106946, "grad_norm": 5.1708478927612305, "learning_rate": 1.9485556238475724e-05, "loss": 1.6275, "step": 1674 },
+    { "epoch": 0.12876459741856178, "grad_norm": 4.305856227874756, "learning_rate": 1.9484941610325754e-05, "loss": 1.6349, "step": 1676 },
+    { "epoch": 0.1289182544560541, "grad_norm": 4.788485050201416, "learning_rate": 1.9484326982175784e-05, "loss": 1.4676, "step": 1678 },
+    { "epoch": 0.1290719114935464, "grad_norm": 4.4581379890441895, "learning_rate": 1.9483712354025817e-05, "loss": 1.5062, "step": 1680 },
+    { "epoch": 0.12922556853103873, "grad_norm": 3.9021549224853516, "learning_rate": 1.9483097725875846e-05, "loss": 1.7848, "step": 1682 },
+    { "epoch": 0.12937922556853104, "grad_norm": 4.530584812164307, "learning_rate": 1.9482483097725876e-05, "loss": 1.6594, "step": 1684 },
+    { "epoch": 0.12953288260602336, "grad_norm": 4.8017497062683105, "learning_rate": 1.948186846957591e-05, "loss": 1.6167, "step": 1686 },
+    { "epoch": 0.12968653964351567, "grad_norm": 4.41823148727417, "learning_rate": 1.948125384142594e-05, "loss": 1.5293, "step": 1688 },
+    { "epoch": 0.129840196681008, "grad_norm": 4.470682144165039, "learning_rate": 1.948063921327597e-05, "loss": 1.6036, "step": 1690 },
+    { "epoch": 0.1299938537185003, "grad_norm": 3.947842597961426, "learning_rate": 1.9480024585126e-05, "loss": 1.7037, "step": 1692 },
+    { "epoch": 0.13014751075599262, "grad_norm": 4.953098297119141, "learning_rate": 1.947940995697603e-05, "loss": 1.6436, "step": 1694 },
+    { "epoch": 0.13030116779348494, "grad_norm": 4.112635135650635, "learning_rate": 1.947879532882606e-05, "loss": 1.4595, "step": 1696 },
+    { "epoch": 0.13045482483097726, "grad_norm": 4.197033882141113, "learning_rate": 1.9478180700676094e-05, "loss": 1.657, "step": 1698 },
+    { "epoch": 0.13060848186846957, "grad_norm": 4.02692985534668, "learning_rate": 1.9477566072526124e-05, "loss": 1.6321, "step": 1700 },
+    { "epoch": 0.1307621389059619, "grad_norm": 4.7861809730529785, "learning_rate": 1.9476951444376153e-05, "loss": 1.5609, "step": 1702 },
+    { "epoch": 0.1309157959434542, "grad_norm": 4.392903804779053, "learning_rate": 1.9476336816226183e-05, "loss": 1.7549, "step": 1704 },
+    { "epoch": 0.13106945298094652, "grad_norm": 4.314429759979248, "learning_rate": 1.9475722188076216e-05, "loss": 1.5698, "step": 1706 },
+    { "epoch": 0.13122311001843884, "grad_norm": 4.254858016967773, "learning_rate": 1.9475107559926246e-05, "loss": 1.6291, "step": 1708 },
+    { "epoch": 0.13137676705593115, "grad_norm": 4.288058757781982, "learning_rate": 1.9474492931776276e-05, "loss": 1.822, "step": 1710 },
+    { "epoch": 0.13153042409342347, "grad_norm": 4.206986904144287, "learning_rate": 1.947387830362631e-05, "loss": 1.6372, "step": 1712 },
+    { "epoch": 0.1316840811309158, "grad_norm": 3.9056224822998047, "learning_rate": 1.947326367547634e-05, "loss": 1.6141, "step": 1714 },
+    { "epoch": 0.1318377381684081, "grad_norm": 5.1152777671813965, "learning_rate": 1.9472649047326368e-05, "loss": 1.6361, "step": 1716 },
+    { "epoch": 0.13199139520590042, "grad_norm": 4.0903120040893555, "learning_rate": 1.94720344191764e-05, "loss": 1.5559, "step": 1718 },
+    { "epoch": 0.13214505224339274, "grad_norm": 4.825276851654053, "learning_rate": 1.947141979102643e-05, "loss": 1.732, "step": 1720 },
+    { "epoch": 0.13229870928088505, "grad_norm": 4.649293899536133, "learning_rate": 1.947080516287646e-05, "loss": 1.5941, "step": 1722 },
+    { "epoch": 0.13245236631837737, "grad_norm": 4.052992820739746, "learning_rate": 1.9470190534726494e-05, "loss": 1.5664, "step": 1724 },
+    { "epoch": 0.1326060233558697, "grad_norm": 4.36129903793335, "learning_rate": 1.9469575906576523e-05, "loss": 1.7345, "step": 1726 },
+    { "epoch": 0.13275968039336203, "grad_norm": 4.522770404815674, "learning_rate": 1.9468961278426553e-05, "loss": 1.6731, "step": 1728 },
+    { "epoch": 0.13291333743085434, "grad_norm": 4.922299385070801, "learning_rate": 1.9468346650276583e-05, "loss": 1.8072, "step": 1730 },
+    { "epoch": 0.13306699446834666, "grad_norm": 4.385134220123291, "learning_rate": 1.9467732022126616e-05, "loss": 1.5836, "step": 1732 },
+    { "epoch": 0.13322065150583898, "grad_norm": 4.031277179718018, "learning_rate": 1.9467117393976645e-05, "loss": 1.465, "step": 1734 },
+    { "epoch": 0.1333743085433313, "grad_norm": 4.437002182006836, "learning_rate": 1.9466502765826675e-05, "loss": 1.5624, "step": 1736 },
+    { "epoch": 0.1335279655808236, "grad_norm": 3.754696846008301, "learning_rate": 1.9465888137676708e-05, "loss": 1.6044, "step": 1738 },
+    { "epoch": 0.13368162261831593, "grad_norm": 3.967130661010742, "learning_rate": 1.9465273509526738e-05, "loss": 1.5832, "step": 1740 },
+    { "epoch": 0.13383527965580824, "grad_norm": 3.958448648452759, "learning_rate": 1.9464658881376767e-05, "loss": 1.6812, "step": 1742 },
+    { "epoch": 0.13398893669330056, "grad_norm": 5.2511982917785645, "learning_rate": 1.94640442532268e-05, "loss": 1.8565, "step": 1744 },
+    { "epoch": 0.13414259373079288, "grad_norm": 4.229193210601807, "learning_rate": 1.946342962507683e-05, "loss": 1.6365, "step": 1746 },
+    { "epoch": 0.1342962507682852, "grad_norm": 3.8518741130828857, "learning_rate": 1.946281499692686e-05, "loss": 1.571, "step": 1748 },
+    { "epoch": 0.1344499078057775, "grad_norm": 4.383627414703369, "learning_rate": 1.946220036877689e-05, "loss": 1.5181, "step": 1750 },
+    { "epoch": 0.13460356484326982, "grad_norm": 4.58341121673584, "learning_rate": 1.9461585740626923e-05, "loss": 1.6387, "step": 1752 },
+    { "epoch": 0.13475722188076214, "grad_norm": 4.656858921051025, "learning_rate": 1.9460971112476956e-05, "loss": 1.6474, "step": 1754 },
+    { "epoch": 0.13491087891825446, "grad_norm": 5.039700031280518, "learning_rate": 1.9460356484326982e-05, "loss": 1.6755, "step": 1756 },
+    { "epoch": 0.13506453595574677, "grad_norm": 4.46349573135376, "learning_rate": 1.9459741856177015e-05, "loss": 1.5728, "step": 1758 },
+    { "epoch": 0.1352181929932391, "grad_norm": 4.041154861450195, "learning_rate": 1.9459127228027045e-05, "loss": 1.5847, "step": 1760 },
+    { "epoch": 0.1353718500307314, "grad_norm": 4.126910209655762, "learning_rate": 1.9458512599877074e-05, "loss": 1.6807, "step": 1762 },
+    { "epoch": 0.13552550706822372, "grad_norm": 4.063604831695557, "learning_rate": 1.9457897971727108e-05, "loss": 1.5294, "step": 1764 },
+    { "epoch": 0.13567916410571604, "grad_norm": 4.1347150802612305, "learning_rate": 1.9457283343577137e-05, "loss": 1.5728, "step": 1766 },
+    { "epoch": 0.13583282114320835, "grad_norm": 4.593793869018555, "learning_rate": 1.9456668715427167e-05, "loss": 1.7155, "step": 1768 },
+    { "epoch": 0.13598647818070067, "grad_norm": 4.340649127960205, "learning_rate": 1.94560540872772e-05, "loss": 1.6996, "step": 1770 },
+    { "epoch": 0.136140135218193, "grad_norm": 4.278517246246338, "learning_rate": 1.945543945912723e-05, "loss": 1.6012, "step": 1772 },
+    { "epoch": 0.1362937922556853, "grad_norm": 4.626030445098877, "learning_rate": 1.9454824830977263e-05, "loss": 1.6195, "step": 1774 },
+    { "epoch": 0.13644744929317762, "grad_norm": 4.450915813446045, "learning_rate": 1.945421020282729e-05, "loss": 1.6398, "step": 1776 },
+    { "epoch": 0.13660110633066994, "grad_norm": 4.265727996826172, "learning_rate": 1.9453595574677322e-05, "loss": 1.5958, "step": 1778 },
+    { "epoch": 0.13675476336816225, "grad_norm": 4.036159038543701, "learning_rate": 1.9452980946527352e-05, "loss": 1.5647, "step": 1780 },
+    { "epoch": 0.13690842040565457, "grad_norm": 4.2282257080078125, "learning_rate": 1.945236631837738e-05, "loss": 1.6079, "step": 1782 },
+    { "epoch": 0.13706207744314688, "grad_norm": 4.005040645599365, "learning_rate": 1.9451751690227415e-05, "loss": 1.5044, "step": 1784 },
+    { "epoch": 0.1372157344806392, "grad_norm": 4.676270484924316, "learning_rate": 1.9451137062077444e-05, "loss": 1.6304, "step": 1786 },
+    { "epoch": 0.13736939151813152, "grad_norm": 4.598161697387695, "learning_rate": 1.9450522433927474e-05, "loss": 1.6887, "step": 1788 },
+    { "epoch": 0.13752304855562386, "grad_norm": 5.0116448402404785, "learning_rate": 1.9449907805777507e-05, "loss": 1.6998, "step": 1790 },
+    { "epoch": 0.13767670559311618, "grad_norm": 4.892838954925537, "learning_rate": 1.9449293177627537e-05, "loss": 1.6274, "step": 1792 },
+    { "epoch": 0.1378303626306085, "grad_norm": 5.293637752532959, "learning_rate": 1.944867854947757e-05, "loss": 1.628, "step": 1794 },
+    { "epoch": 0.1379840196681008, "grad_norm": 4.583549976348877, "learning_rate": 1.94480639213276e-05, "loss": 1.4792, "step": 1796 },
+    { "epoch": 0.13813767670559313, "grad_norm": 3.773277759552002, "learning_rate": 1.944744929317763e-05, "loss": 1.5219, "step": 1798 },
+    { "epoch": 0.13829133374308544, "grad_norm": 4.440420150756836, "learning_rate": 1.9446834665027662e-05, "loss": 1.6732, "step": 1800 },
+    { "epoch": 0.13844499078057776, "grad_norm": 4.711763858795166, "learning_rate": 1.944622003687769e-05, "loss": 1.5463, "step": 1802 },
+    { "epoch": 0.13859864781807008, "grad_norm": 5.035058498382568, "learning_rate": 1.944560540872772e-05, "loss": 1.6961, "step": 1804 },
+    { "epoch": 0.1387523048555624, "grad_norm": 3.963282346725464, "learning_rate": 1.944499078057775e-05, "loss": 1.5568, "step": 1806 },
+    { "epoch": 0.1389059618930547, "grad_norm": 4.577483654022217, "learning_rate": 1.944437615242778e-05, "loss": 1.5428, "step": 1808 },
+    { "epoch": 0.13905961893054702, "grad_norm": 4.509146690368652, "learning_rate": 1.9443761524277814e-05, "loss": 1.6397, "step": 1810 },
+    { "epoch": 0.13921327596803934, "grad_norm": 4.317050933837891, "learning_rate": 1.9443146896127844e-05, "loss": 1.7304, "step": 1812 },
+    { "epoch": 0.13936693300553166, "grad_norm": 4.572277069091797, "learning_rate": 1.9442532267977877e-05, "loss": 1.6759, "step": 1814 },
+    { "epoch": 0.13952059004302397, "grad_norm": 4.773606777191162, "learning_rate": 1.9441917639827906e-05, "loss": 1.6869, "step": 1816 },
+    { "epoch": 0.1396742470805163, "grad_norm": 4.57815408706665, "learning_rate": 1.9441303011677936e-05, "loss": 1.647, "step": 1818 },
+    { "epoch": 0.1398279041180086, "grad_norm": 4.822877407073975, "learning_rate": 1.944068838352797e-05, "loss": 1.6182, "step": 1820 },
+    { "epoch": 0.13998156115550092, "grad_norm": 4.272431373596191, "learning_rate": 1.9440073755378e-05, "loss": 1.6215, "step": 1822 },
+    { "epoch": 0.14013521819299324, "grad_norm": 4.476557731628418, "learning_rate": 1.943945912722803e-05, "loss": 1.6277, "step": 1824 },
+    { "epoch": 0.14028887523048555, "grad_norm": 4.522927284240723, "learning_rate": 1.943884449907806e-05, "loss": 1.526, "step": 1826 },
+    { "epoch": 0.14044253226797787, "grad_norm": 3.991070032119751, "learning_rate": 1.9438229870928088e-05, "loss": 1.6106, "step": 1828 },
+    { "epoch": 0.1405961893054702, "grad_norm": 4.189483165740967, "learning_rate": 1.943761524277812e-05, "loss": 1.619, "step": 1830 },
+    { "epoch": 0.1407498463429625, "grad_norm": 4.5693159103393555, "learning_rate": 1.943700061462815e-05, "loss": 1.7438, "step": 1832 },
+    { "epoch": 0.14090350338045482, "grad_norm": 3.8766119480133057, "learning_rate": 1.943638598647818e-05, "loss": 1.4953, "step": 1834 },
+    { "epoch": 0.14105716041794714, "grad_norm": 4.294021129608154, "learning_rate": 1.9435771358328213e-05, "loss": 1.5746, "step": 1836 },
+    { "epoch": 0.14121081745543945, "grad_norm": 4.195743083953857, "learning_rate": 1.9435156730178243e-05, "loss": 1.5049, "step": 1838 },
+    { "epoch": 0.14136447449293177, "grad_norm": 4.331358909606934, "learning_rate": 1.9434542102028276e-05, "loss": 1.8416, "step": 1840 },
+    { "epoch": 0.14151813153042408, "grad_norm": 4.328099727630615, "learning_rate": 1.9433927473878306e-05, "loss": 1.7201, "step": 1842 },
+    { "epoch": 0.1416717885679164, "grad_norm": 4.2462005615234375, "learning_rate": 1.9433312845728336e-05, "loss": 1.6308, "step": 1844 },
+    { "epoch": 0.14182544560540872, "grad_norm": 4.253352165222168, "learning_rate": 1.943269821757837e-05, "loss": 1.6856, "step": 1846 },
+    { "epoch": 0.14197910264290103, "grad_norm": 4.154186248779297, "learning_rate": 1.9432083589428395e-05, "loss": 1.6079, "step": 1848 },
+    { "epoch": 0.14213275968039335, "grad_norm": 6.227648735046387, "learning_rate": 1.9431468961278428e-05, "loss": 1.73, "step": 1850 },
+    { "epoch": 0.14228641671788567, "grad_norm": 4.038461208343506, "learning_rate": 1.943085433312846e-05, "loss": 1.7227, "step": 1852 },
+    { "epoch": 0.14244007375537798, "grad_norm": 4.844911098480225, "learning_rate": 1.9430239704978487e-05, "loss": 1.5841, "step": 1854 },
+    { "epoch": 0.14259373079287033, "grad_norm": 3.845120429992676, "learning_rate": 1.942962507682852e-05, "loss": 1.6545, "step": 1856 },
+    { "epoch": 0.14274738783036264, "grad_norm": 4.25357723236084, "learning_rate": 1.942901044867855e-05, "loss": 1.6038, "step": 1858 },
+    { "epoch": 0.14290104486785496, "grad_norm": 4.518612861633301, "learning_rate": 1.9428395820528583e-05, "loss": 1.703, "step": 1860 },
+    { "epoch": 0.14305470190534728, "grad_norm": 4.541075229644775, "learning_rate": 1.9427781192378613e-05, "loss": 1.6203, "step": 1862 },
+    { "epoch": 0.1432083589428396, "grad_norm": 4.06412935256958, "learning_rate": 1.9427166564228643e-05, "loss": 1.714, "step": 1864 },
+    { "epoch": 0.1433620159803319, "grad_norm": 4.289870738983154, "learning_rate": 1.9426551936078676e-05, "loss": 1.5524, "step": 1866 },
+    { "epoch": 0.14351567301782422, "grad_norm": 3.937469005584717, "learning_rate": 1.9425937307928705e-05, "loss": 1.5572, "step": 1868 },
+    { "epoch": 0.14366933005531654, "grad_norm": 4.361362457275391, "learning_rate": 1.9425322679778735e-05, "loss": 1.493, "step": 1870 },
+    { "epoch": 0.14382298709280886, "grad_norm": 3.9257559776306152, "learning_rate": 1.9424708051628768e-05, "loss": 1.502, "step": 1872 },
+    { "epoch": 0.14397664413030117, "grad_norm": 4.2765655517578125, "learning_rate": 1.9424093423478794e-05, "loss": 1.6001, "step": 1874 },
+    { "epoch": 0.1441303011677935, "grad_norm": 3.724155902862549, "learning_rate": 1.9423478795328827e-05, "loss": 1.5722, "step": 1876 },
+    { "epoch": 0.1442839582052858, "grad_norm": 4.135402679443359, "learning_rate": 1.9422864167178857e-05, "loss": 1.7137, "step": 1878 },
+    { "epoch": 0.14443761524277812, "grad_norm": 4.522433280944824, "learning_rate": 1.942224953902889e-05, "loss": 1.6161, "step": 1880 },
+    { "epoch": 0.14459127228027044, "grad_norm": 4.247946262359619, "learning_rate": 1.942163491087892e-05, "loss": 1.67, "step": 1882 },
+    { "epoch": 0.14474492931776275, "grad_norm": 4.322160243988037, "learning_rate": 1.942102028272895e-05, "loss": 1.5362, "step": 1884 },
+    { "epoch": 0.14489858635525507, "grad_norm": 4.25150728225708, "learning_rate": 1.9420405654578983e-05, "loss": 1.6026, "step": 1886 },
+    { "epoch": 0.1450522433927474, "grad_norm": 4.95831823348999, "learning_rate": 1.9419791026429012e-05, "loss": 1.6806, "step": 1888 },
+    { "epoch": 0.1452059004302397, "grad_norm": 4.125936031341553, "learning_rate": 1.9419176398279042e-05, "loss": 1.634, "step": 1890 },
+    { "epoch": 0.14535955746773202, "grad_norm": 3.6493210792541504, "learning_rate": 1.9418561770129075e-05, "loss": 1.4507, "step": 1892 },
+    { "epoch": 0.14551321450522434, "grad_norm": 4.338488578796387, "learning_rate": 1.9417947141979105e-05, "loss": 1.5127, "step": 1894 },
+    { "epoch": 0.14566687154271665, "grad_norm": 4.250901222229004, "learning_rate": 1.9417332513829134e-05, "loss": 1.4201, "step": 1896 },
+    { "epoch": 0.14582052858020897, "grad_norm": 3.8600480556488037, "learning_rate": 1.9416717885679167e-05, "loss": 1.5595, "step": 1898 },
+    { "epoch": 0.14597418561770129, "grad_norm": 4.317285537719727, "learning_rate": 1.9416103257529197e-05, "loss": 1.6094, "step": 1900 },
+    { "epoch": 0.1461278426551936, "grad_norm": 4.718072891235352, "learning_rate": 1.9415488629379227e-05, "loss": 1.768, "step": 1902 },
+    { "epoch": 0.14628149969268592, "grad_norm": 4.9370808601379395, "learning_rate": 1.9414874001229257e-05, "loss": 1.5801, "step": 1904 },
+    { "epoch": 0.14643515673017823, "grad_norm": 4.436810493469238, "learning_rate": 1.941425937307929e-05, "loss": 1.5847, "step": 1906 },
+    { "epoch": 0.14658881376767055, "grad_norm": 4.890700817108154, "learning_rate": 1.941364474492932e-05, "loss": 1.5092, "step": 1908 },
+    { "epoch": 0.14674247080516287, "grad_norm": 3.926815986633301, "learning_rate": 1.941303011677935e-05, "loss": 1.6612, "step": 1910 },
+    { "epoch": 0.14689612784265518, "grad_norm": 4.331315994262695, "learning_rate": 1.9412415488629382e-05, "loss": 1.7669, "step": 1912 },
+    { "epoch": 0.1470497848801475, "grad_norm": 5.178247928619385, "learning_rate": 1.9411800860479412e-05, "loss": 1.6749, "step": 1914 },
+    { "epoch": 0.14720344191763982, "grad_norm": 3.871377944946289, "learning_rate": 1.941118623232944e-05, "loss": 1.4508, "step": 1916 },
+    { "epoch": 0.14735709895513213, "grad_norm": 4.062928676605225, "learning_rate": 1.9410571604179474e-05, "loss": 1.458, "step": 1918 },
+    { "epoch": 0.14751075599262448, "grad_norm": 4.205310344696045, "learning_rate": 1.9409956976029504e-05, "loss": 1.6419, "step": 1920 },
+    { "epoch": 0.1476644130301168, "grad_norm": 3.822014093399048, "learning_rate": 1.9409342347879534e-05, "loss": 1.558, "step": 1922 },
+    { "epoch": 0.1478180700676091, "grad_norm": 4.9377593994140625, "learning_rate": 1.9408727719729567e-05,
     }
   ],
   "logging_steps": 2,
@@ -5276,7 +8426,7 @@
       "attributes": {}
     }
   },
-  "total_flos":
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
|
6742 |
+
"loss": 1.687,
|
6743 |
+
"step": 1924
|
6744 |
+
},
|
6745 |
+
{
|
6746 |
+
"epoch": 0.14797172710510142,
|
6747 |
+
"grad_norm": 4.531102657318115,
|
6748 |
+
"learning_rate": 1.9408113091579597e-05,
|
6749 |
+
"loss": 1.7183,
|
6750 |
+
"step": 1926
|
6751 |
+
},
|
6752 |
+
{
|
6753 |
+
"epoch": 0.14812538414259374,
|
6754 |
+
"grad_norm": 4.2398881912231445,
|
6755 |
+
"learning_rate": 1.9407498463429626e-05,
|
6756 |
+
"loss": 1.5952,
|
6757 |
+
"step": 1928
|
6758 |
+
},
|
6759 |
+
{
|
6760 |
+
"epoch": 0.14827904118008606,
|
6761 |
+
"grad_norm": 4.426001071929932,
|
6762 |
+
"learning_rate": 1.9406883835279656e-05,
|
6763 |
+
"loss": 1.7844,
|
6764 |
+
"step": 1930
|
6765 |
+
},
|
6766 |
+
{
|
6767 |
+
"epoch": 0.14843269821757837,
|
6768 |
+
"grad_norm": 4.2123494148254395,
|
6769 |
+
"learning_rate": 1.940626920712969e-05,
|
6770 |
+
"loss": 1.6711,
|
6771 |
+
"step": 1932
|
6772 |
+
},
|
6773 |
+
{
|
6774 |
+
"epoch": 0.1485863552550707,
|
6775 |
+
"grad_norm": 4.681150913238525,
|
6776 |
+
"learning_rate": 1.940565457897972e-05,
|
6777 |
+
"loss": 1.6319,
|
6778 |
+
"step": 1934
|
6779 |
+
},
|
6780 |
+
{
|
6781 |
+
"epoch": 0.148740012292563,
|
6782 |
+
"grad_norm": 4.499131202697754,
|
6783 |
+
"learning_rate": 1.940503995082975e-05,
|
6784 |
+
"loss": 1.6108,
|
6785 |
+
"step": 1936
|
6786 |
+
},
|
6787 |
+
{
|
6788 |
+
"epoch": 0.14889366933005532,
|
6789 |
+
"grad_norm": 5.171452522277832,
|
6790 |
+
"learning_rate": 1.940442532267978e-05,
|
6791 |
+
"loss": 1.7528,
|
6792 |
+
"step": 1938
|
6793 |
+
},
|
6794 |
+
{
|
6795 |
+
"epoch": 0.14904732636754764,
|
6796 |
+
"grad_norm": 4.263631343841553,
|
6797 |
+
"learning_rate": 1.940381069452981e-05,
|
6798 |
+
"loss": 1.5191,
|
6799 |
+
"step": 1940
|
6800 |
+
},
|
6801 |
+
{
|
6802 |
+
"epoch": 0.14920098340503996,
|
6803 |
+
"grad_norm": 5.029245853424072,
|
6804 |
+
"learning_rate": 1.940319606637984e-05,
|
6805 |
+
"loss": 1.5623,
|
6806 |
+
"step": 1942
|
6807 |
+
},
|
6808 |
+
{
|
6809 |
+
"epoch": 0.14935464044253227,
|
6810 |
+
"grad_norm": 4.505579948425293,
|
6811 |
+
"learning_rate": 1.9402581438229874e-05,
|
6812 |
+
"loss": 1.4555,
|
6813 |
+
"step": 1944
|
6814 |
+
},
|
6815 |
+
{
|
6816 |
+
"epoch": 0.1495082974800246,
|
6817 |
+
"grad_norm": 4.269020080566406,
|
6818 |
+
"learning_rate": 1.9401966810079904e-05,
|
6819 |
+
"loss": 1.5765,
|
6820 |
+
"step": 1946
|
6821 |
+
},
|
6822 |
+
{
|
6823 |
+
"epoch": 0.1496619545175169,
|
6824 |
+
"grad_norm": 3.9689948558807373,
|
6825 |
+
"learning_rate": 1.9401352181929933e-05,
|
6826 |
+
"loss": 1.6277,
|
6827 |
+
"step": 1948
|
6828 |
+
},
|
6829 |
+
{
|
6830 |
+
"epoch": 0.14981561155500922,
|
6831 |
+
"grad_norm": 3.684130907058716,
|
6832 |
+
"learning_rate": 1.9400737553779966e-05,
|
6833 |
+
"loss": 1.5172,
|
6834 |
+
"step": 1950
|
6835 |
+
},
|
6836 |
+
{
|
6837 |
+
"epoch": 0.14996926859250154,
|
6838 |
+
"grad_norm": 3.713602066040039,
|
6839 |
+
"learning_rate": 1.9400122925629996e-05,
|
6840 |
+
"loss": 1.5211,
|
6841 |
+
"step": 1952
|
6842 |
+
},
|
6843 |
+
{
|
6844 |
+
"epoch": 0.15012292562999385,
|
6845 |
+
"grad_norm": 4.927131175994873,
|
6846 |
+
"learning_rate": 1.9399508297480026e-05,
|
6847 |
+
"loss": 1.6324,
|
6848 |
+
"step": 1954
|
6849 |
+
},
|
6850 |
+
{
|
6851 |
+
"epoch": 0.15027658266748617,
|
6852 |
+
"grad_norm": 4.4799299240112305,
|
6853 |
+
"learning_rate": 1.9398893669330055e-05,
|
6854 |
+
"loss": 1.5679,
|
6855 |
+
"step": 1956
|
6856 |
+
},
|
6857 |
+
{
|
6858 |
+
"epoch": 0.15043023970497849,
|
6859 |
+
"grad_norm": 4.372833251953125,
|
6860 |
+
"learning_rate": 1.939827904118009e-05,
|
6861 |
+
"loss": 1.5638,
|
6862 |
+
"step": 1958
|
6863 |
+
},
|
6864 |
+
{
|
6865 |
+
"epoch": 0.1505838967424708,
|
6866 |
+
"grad_norm": 4.321191787719727,
|
6867 |
+
"learning_rate": 1.9397664413030118e-05,
|
6868 |
+
"loss": 1.4329,
|
6869 |
+
"step": 1960
|
6870 |
+
},
|
6871 |
+
{
|
6872 |
+
"epoch": 0.15073755377996312,
|
6873 |
+
"grad_norm": 4.75023078918457,
|
6874 |
+
"learning_rate": 1.9397049784880148e-05,
|
6875 |
+
"loss": 1.6682,
|
6876 |
+
"step": 1962
|
6877 |
+
},
|
6878 |
+
{
|
6879 |
+
"epoch": 0.15089121081745543,
|
6880 |
+
"grad_norm": 4.172933101654053,
|
6881 |
+
"learning_rate": 1.939643515673018e-05,
|
6882 |
+
"loss": 1.7316,
|
6883 |
+
"step": 1964
|
6884 |
+
},
|
6885 |
+
{
|
6886 |
+
"epoch": 0.15104486785494775,
|
6887 |
+
"grad_norm": 4.329415321350098,
|
6888 |
+
"learning_rate": 1.939582052858021e-05,
|
6889 |
+
"loss": 1.7017,
|
6890 |
+
"step": 1966
|
6891 |
+
},
|
6892 |
+
{
|
6893 |
+
"epoch": 0.15119852489244007,
|
6894 |
+
"grad_norm": 4.249721527099609,
|
6895 |
+
"learning_rate": 1.939520590043024e-05,
|
6896 |
+
"loss": 1.5796,
|
6897 |
+
"step": 1968
|
6898 |
+
},
|
6899 |
+
{
|
6900 |
+
"epoch": 0.15135218192993238,
|
6901 |
+
"grad_norm": 4.071712970733643,
|
6902 |
+
"learning_rate": 1.9394591272280273e-05,
|
6903 |
+
"loss": 1.4623,
|
6904 |
+
"step": 1970
|
6905 |
+
},
|
6906 |
+
{
|
6907 |
+
"epoch": 0.1515058389674247,
|
6908 |
+
"grad_norm": 4.0507731437683105,
|
6909 |
+
"learning_rate": 1.9393976644130303e-05,
|
6910 |
+
"loss": 1.4821,
|
6911 |
+
"step": 1972
|
6912 |
+
},
|
6913 |
+
{
|
6914 |
+
"epoch": 0.15165949600491702,
|
6915 |
+
"grad_norm": 4.356963634490967,
|
6916 |
+
"learning_rate": 1.9393362015980333e-05,
|
6917 |
+
"loss": 1.547,
|
6918 |
+
"step": 1974
|
6919 |
+
},
|
6920 |
+
{
|
6921 |
+
"epoch": 0.15181315304240933,
|
6922 |
+
"grad_norm": 5.182737350463867,
|
6923 |
+
"learning_rate": 1.9392747387830362e-05,
|
6924 |
+
"loss": 1.6367,
|
6925 |
+
"step": 1976
|
6926 |
+
},
|
6927 |
+
{
|
6928 |
+
"epoch": 0.15196681007990165,
|
6929 |
+
"grad_norm": 3.9492363929748535,
|
6930 |
+
"learning_rate": 1.9392132759680395e-05,
|
6931 |
+
"loss": 1.679,
|
6932 |
+
"step": 1978
|
6933 |
+
},
|
6934 |
+
{
|
6935 |
+
"epoch": 0.15212046711739396,
|
6936 |
+
"grad_norm": 4.404160976409912,
|
6937 |
+
"learning_rate": 1.9391518131530425e-05,
|
6938 |
+
"loss": 1.5288,
|
6939 |
+
"step": 1980
|
6940 |
+
},
|
6941 |
+
{
|
6942 |
+
"epoch": 0.15227412415488628,
|
6943 |
+
"grad_norm": 4.500973701477051,
|
6944 |
+
"learning_rate": 1.9390903503380455e-05,
|
6945 |
+
"loss": 1.7715,
|
6946 |
+
"step": 1982
|
6947 |
+
},
|
6948 |
+
{
|
6949 |
+
"epoch": 0.1524277811923786,
|
6950 |
+
"grad_norm": 4.121068954467773,
|
6951 |
+
"learning_rate": 1.9390288875230488e-05,
|
6952 |
+
"loss": 1.5507,
|
6953 |
+
"step": 1984
|
6954 |
+
},
|
6955 |
+
{
|
6956 |
+
"epoch": 0.15258143822987094,
|
6957 |
+
"grad_norm": 3.7095515727996826,
|
6958 |
+
"learning_rate": 1.9389674247080518e-05,
|
6959 |
+
"loss": 1.616,
|
6960 |
+
"step": 1986
|
6961 |
+
},
|
6962 |
+
{
|
6963 |
+
"epoch": 0.15273509526736326,
|
6964 |
+
"grad_norm": 5.333407878875732,
|
6965 |
+
"learning_rate": 1.9389059618930547e-05,
|
6966 |
+
"loss": 1.7196,
|
6967 |
+
"step": 1988
|
6968 |
+
},
|
6969 |
+
{
|
6970 |
+
"epoch": 0.15288875230485557,
|
6971 |
+
"grad_norm": 4.188971042633057,
|
6972 |
+
"learning_rate": 1.938844499078058e-05,
|
6973 |
+
"loss": 1.6256,
|
6974 |
+
"step": 1990
|
6975 |
+
},
|
6976 |
+
{
|
6977 |
+
"epoch": 0.1530424093423479,
|
6978 |
+
"grad_norm": 4.126604080200195,
|
6979 |
+
"learning_rate": 1.938783036263061e-05,
|
6980 |
+
"loss": 1.5089,
|
6981 |
+
"step": 1992
|
6982 |
+
},
|
6983 |
+
{
|
6984 |
+
"epoch": 0.1531960663798402,
|
6985 |
+
"grad_norm": 4.127197742462158,
|
6986 |
+
"learning_rate": 1.938721573448064e-05,
|
6987 |
+
"loss": 1.5598,
|
6988 |
+
"step": 1994
|
6989 |
+
},
|
6990 |
+
{
|
6991 |
+
"epoch": 0.15334972341733252,
|
6992 |
+
"grad_norm": 4.481958389282227,
|
6993 |
+
"learning_rate": 1.9386601106330673e-05,
|
6994 |
+
"loss": 1.6238,
|
6995 |
+
"step": 1996
|
6996 |
+
},
|
6997 |
+
{
|
6998 |
+
"epoch": 0.15350338045482484,
|
6999 |
+
"grad_norm": 4.15784215927124,
|
7000 |
+
"learning_rate": 1.9385986478180702e-05,
|
7001 |
+
"loss": 1.6611,
|
7002 |
+
"step": 1998
|
7003 |
+
},
|
7004 |
+
{
|
7005 |
+
"epoch": 0.15365703749231716,
|
7006 |
+
"grad_norm": 4.322861194610596,
|
7007 |
+
"learning_rate": 1.9385371850030732e-05,
|
7008 |
+
"loss": 1.513,
|
7009 |
+
"step": 2000
|
7010 |
+
},
|
7011 |
+
{
|
7012 |
+
"epoch": 0.15381069452980947,
|
7013 |
+
"grad_norm": 3.9926345348358154,
|
7014 |
+
"learning_rate": 1.9384757221880762e-05,
|
7015 |
+
"loss": 1.5764,
|
7016 |
+
"step": 2002
|
7017 |
+
},
|
7018 |
+
{
|
7019 |
+
"epoch": 0.1539643515673018,
|
7020 |
+
"grad_norm": 5.112368583679199,
|
7021 |
+
"learning_rate": 1.9384142593730795e-05,
|
7022 |
+
"loss": 1.6181,
|
7023 |
+
"step": 2004
|
7024 |
+
},
|
7025 |
+
{
|
7026 |
+
"epoch": 0.1541180086047941,
|
7027 |
+
"grad_norm": 3.6655466556549072,
|
7028 |
+
"learning_rate": 1.9383527965580825e-05,
|
7029 |
+
"loss": 1.3721,
|
7030 |
+
"step": 2006
|
7031 |
+
},
|
7032 |
+
{
|
7033 |
+
"epoch": 0.15427166564228642,
|
7034 |
+
"grad_norm": 4.533908367156982,
|
7035 |
+
"learning_rate": 1.9382913337430854e-05,
|
7036 |
+
"loss": 1.5841,
|
7037 |
+
"step": 2008
|
7038 |
+
},
|
7039 |
+
{
|
7040 |
+
"epoch": 0.15442532267977874,
|
7041 |
+
"grad_norm": 3.722304344177246,
|
7042 |
+
"learning_rate": 1.9382298709280887e-05,
|
7043 |
+
"loss": 1.5965,
|
7044 |
+
"step": 2010
|
7045 |
+
},
|
7046 |
+
{
|
7047 |
+
"epoch": 0.15457897971727105,
|
7048 |
+
"grad_norm": 4.5336012840271,
|
7049 |
+
"learning_rate": 1.9381684081130917e-05,
|
7050 |
+
"loss": 1.5995,
|
7051 |
+
"step": 2012
|
7052 |
+
},
|
7053 |
+
{
|
7054 |
+
"epoch": 0.15473263675476337,
|
7055 |
+
"grad_norm": 4.144698619842529,
|
7056 |
+
"learning_rate": 1.9381069452980947e-05,
|
7057 |
+
"loss": 1.6343,
|
7058 |
+
"step": 2014
|
7059 |
+
},
|
7060 |
+
{
|
7061 |
+
"epoch": 0.15488629379225569,
|
7062 |
+
"grad_norm": 4.033304691314697,
|
7063 |
+
"learning_rate": 1.938045482483098e-05,
|
7064 |
+
"loss": 1.6888,
|
7065 |
+
"step": 2016
|
7066 |
+
},
|
7067 |
+
{
|
7068 |
+
"epoch": 0.155039950829748,
|
7069 |
+
"grad_norm": 4.00151252746582,
|
7070 |
+
"learning_rate": 1.937984019668101e-05,
|
7071 |
+
"loss": 1.5246,
|
7072 |
+
"step": 2018
|
7073 |
+
},
|
7074 |
+
{
|
7075 |
+
"epoch": 0.15519360786724032,
|
7076 |
+
"grad_norm": 4.766987323760986,
|
7077 |
+
"learning_rate": 1.937922556853104e-05,
|
7078 |
+
"loss": 1.7345,
|
7079 |
+
"step": 2020
|
7080 |
+
},
|
7081 |
+
{
|
7082 |
+
"epoch": 0.15534726490473263,
|
7083 |
+
"grad_norm": 4.144290924072266,
|
7084 |
+
"learning_rate": 1.9378610940381072e-05,
|
7085 |
+
"loss": 1.5848,
|
7086 |
+
"step": 2022
|
7087 |
+
},
|
7088 |
+
{
|
7089 |
+
"epoch": 0.15550092194222495,
|
7090 |
+
"grad_norm": 4.038874626159668,
|
7091 |
+
"learning_rate": 1.9377996312231102e-05,
|
7092 |
+
"loss": 1.6425,
|
7093 |
+
"step": 2024
|
7094 |
+
},
|
7095 |
+
{
|
7096 |
+
"epoch": 0.15565457897971727,
|
7097 |
+
"grad_norm": 3.832429885864258,
|
7098 |
+
"learning_rate": 1.9377381684081135e-05,
|
7099 |
+
"loss": 1.5454,
|
7100 |
+
"step": 2026
|
7101 |
+
},
|
7102 |
+
{
|
7103 |
+
"epoch": 0.15580823601720958,
|
7104 |
+
"grad_norm": 4.775087833404541,
|
7105 |
+
"learning_rate": 1.937676705593116e-05,
|
7106 |
+
"loss": 1.6767,
|
7107 |
+
"step": 2028
|
7108 |
+
},
|
7109 |
+
{
|
7110 |
+
"epoch": 0.1559618930547019,
|
7111 |
+
"grad_norm": 3.997192144393921,
|
7112 |
+
"learning_rate": 1.9376152427781194e-05,
|
7113 |
+
"loss": 1.5539,
|
7114 |
+
"step": 2030
|
7115 |
+
},
|
7116 |
+
{
|
7117 |
+
"epoch": 0.15611555009219422,
|
7118 |
+
"grad_norm": 3.764519453048706,
|
7119 |
+
"learning_rate": 1.9375537799631224e-05,
|
7120 |
+
"loss": 1.46,
|
7121 |
+
"step": 2032
|
7122 |
+
},
|
7123 |
+
{
|
7124 |
+
"epoch": 0.15626920712968653,
|
7125 |
+
"grad_norm": 4.074234962463379,
|
7126 |
+
"learning_rate": 1.9374923171481254e-05,
|
7127 |
+
"loss": 1.5064,
|
7128 |
+
"step": 2034
|
7129 |
+
},
|
7130 |
+
{
|
7131 |
+
"epoch": 0.15642286416717885,
|
7132 |
+
"grad_norm": 4.302229881286621,
|
7133 |
+
"learning_rate": 1.9374308543331287e-05,
|
7134 |
+
"loss": 1.6276,
|
7135 |
+
"step": 2036
|
7136 |
+
},
|
7137 |
+
{
|
7138 |
+
"epoch": 0.15657652120467117,
|
7139 |
+
"grad_norm": 4.602327346801758,
|
7140 |
+
"learning_rate": 1.9373693915181316e-05,
|
7141 |
+
"loss": 1.5551,
|
7142 |
+
"step": 2038
|
7143 |
+
},
|
7144 |
+
{
|
7145 |
+
"epoch": 0.15673017824216348,
|
7146 |
+
"grad_norm": 4.131155490875244,
|
7147 |
+
"learning_rate": 1.9373079287031346e-05,
|
7148 |
+
"loss": 1.4554,
|
7149 |
+
"step": 2040
|
7150 |
+
},
|
7151 |
+
{
|
7152 |
+
"epoch": 0.1568838352796558,
|
7153 |
+
"grad_norm": 4.661727428436279,
|
7154 |
+
"learning_rate": 1.937246465888138e-05,
|
7155 |
+
"loss": 1.6566,
|
7156 |
+
"step": 2042
|
7157 |
+
},
|
7158 |
+
{
|
7159 |
+
"epoch": 0.15703749231714811,
|
7160 |
+
"grad_norm": 4.23723030090332,
|
7161 |
+
"learning_rate": 1.937185003073141e-05,
|
7162 |
+
"loss": 1.6038,
|
7163 |
+
"step": 2044
|
7164 |
+
},
|
7165 |
+
{
|
7166 |
+
"epoch": 0.15719114935464043,
|
7167 |
+
"grad_norm": 3.995077133178711,
|
7168 |
+
"learning_rate": 1.9371235402581442e-05,
|
7169 |
+
"loss": 1.7374,
|
7170 |
+
"step": 2046
|
7171 |
+
},
|
7172 |
+
{
|
7173 |
+
"epoch": 0.15734480639213275,
|
7174 |
+
"grad_norm": 4.001912593841553,
|
7175 |
+
"learning_rate": 1.937062077443147e-05,
|
7176 |
+
"loss": 1.5991,
|
7177 |
+
"step": 2048
|
7178 |
+
},
|
7179 |
+
{
|
7180 |
+
"epoch": 0.15749846342962506,
|
7181 |
+
"grad_norm": 4.558352470397949,
|
7182 |
+
"learning_rate": 1.93700061462815e-05,
|
7183 |
+
"loss": 1.6115,
|
7184 |
+
"step": 2050
|
7185 |
+
},
|
7186 |
+
{
|
7187 |
+
"epoch": 0.1576521204671174,
|
7188 |
+
"grad_norm": 4.651041030883789,
|
7189 |
+
"learning_rate": 1.9369391518131534e-05,
|
7190 |
+
"loss": 1.5451,
|
7191 |
+
"step": 2052
|
7192 |
+
},
|
7193 |
+
{
|
7194 |
+
"epoch": 0.15780577750460972,
|
7195 |
+
"grad_norm": 4.203875541687012,
|
7196 |
+
"learning_rate": 1.936877688998156e-05,
|
7197 |
+
"loss": 1.5506,
|
7198 |
+
"step": 2054
|
7199 |
+
},
|
7200 |
+
{
|
7201 |
+
"epoch": 0.15795943454210204,
|
7202 |
+
"grad_norm": 4.153205394744873,
|
7203 |
+
"learning_rate": 1.9368162261831594e-05,
|
7204 |
+
"loss": 1.5209,
|
7205 |
+
"step": 2056
|
7206 |
+
},
|
7207 |
+
{
|
7208 |
+
"epoch": 0.15811309157959436,
|
7209 |
+
"grad_norm": 4.187285900115967,
|
7210 |
+
"learning_rate": 1.9367547633681623e-05,
|
7211 |
+
"loss": 1.7677,
|
7212 |
+
"step": 2058
|
7213 |
+
},
|
7214 |
+
{
|
7215 |
+
"epoch": 0.15826674861708667,
|
7216 |
+
"grad_norm": 4.654025554656982,
|
7217 |
+
"learning_rate": 1.9366933005531653e-05,
|
7218 |
+
"loss": 1.63,
|
7219 |
+
"step": 2060
|
7220 |
+
},
|
7221 |
+
{
|
7222 |
+
"epoch": 0.158420405654579,
|
7223 |
+
"grad_norm": 4.145837306976318,
|
7224 |
+
"learning_rate": 1.9366318377381686e-05,
|
7225 |
+
"loss": 1.7693,
|
7226 |
+
"step": 2062
|
7227 |
+
},
|
7228 |
+
{
|
7229 |
+
"epoch": 0.1585740626920713,
|
7230 |
+
"grad_norm": 4.076268196105957,
|
7231 |
+
"learning_rate": 1.9365703749231716e-05,
|
7232 |
+
"loss": 1.5436,
|
7233 |
+
"step": 2064
|
7234 |
+
},
|
7235 |
+
{
|
7236 |
+
"epoch": 0.15872771972956362,
|
7237 |
+
"grad_norm": 3.9100687503814697,
|
7238 |
+
"learning_rate": 1.936508912108175e-05,
|
7239 |
+
"loss": 1.5738,
|
7240 |
+
"step": 2066
|
7241 |
+
},
|
7242 |
+
{
|
7243 |
+
"epoch": 0.15888137676705594,
|
7244 |
+
"grad_norm": 4.173727989196777,
|
7245 |
+
"learning_rate": 1.936447449293178e-05,
|
7246 |
+
"loss": 1.6155,
|
7247 |
+
"step": 2068
|
7248 |
+
},
|
7249 |
+
{
|
7250 |
+
"epoch": 0.15903503380454825,
|
7251 |
+
"grad_norm": 4.555833339691162,
|
7252 |
+
"learning_rate": 1.936385986478181e-05,
|
7253 |
+
"loss": 1.6879,
|
7254 |
+
"step": 2070
|
7255 |
+
},
|
7256 |
+
{
|
7257 |
+
"epoch": 0.15918869084204057,
|
7258 |
+
"grad_norm": 4.008725166320801,
|
7259 |
+
"learning_rate": 1.936324523663184e-05,
|
7260 |
+
"loss": 1.5752,
|
7261 |
+
"step": 2072
|
7262 |
+
},
|
7263 |
+
{
|
7264 |
+
"epoch": 0.1593423478795329,
|
7265 |
+
"grad_norm": 5.628279209136963,
|
7266 |
+
"learning_rate": 1.9362630608481868e-05,
|
7267 |
+
"loss": 1.6574,
|
7268 |
+
"step": 2074
|
7269 |
+
},
|
7270 |
+
{
|
7271 |
+
"epoch": 0.1594960049170252,
|
7272 |
+
"grad_norm": 4.112339496612549,
|
7273 |
+
"learning_rate": 1.93620159803319e-05,
|
7274 |
+
"loss": 1.6789,
|
7275 |
+
"step": 2076
|
7276 |
+
},
|
7277 |
+
{
|
7278 |
+
"epoch": 0.15964966195451752,
|
7279 |
+
"grad_norm": 4.378570556640625,
|
7280 |
+
"learning_rate": 1.9361401352181934e-05,
|
7281 |
+
"loss": 1.4837,
|
7282 |
+
"step": 2078
|
7283 |
+
},
|
7284 |
+
{
|
7285 |
+
"epoch": 0.15980331899200984,
|
7286 |
+
"grad_norm": 3.819582223892212,
|
7287 |
+
"learning_rate": 1.936078672403196e-05,
|
7288 |
+
"loss": 1.573,
|
7289 |
+
"step": 2080
|
7290 |
+
},
|
7291 |
+
{
|
7292 |
+
"epoch": 0.15995697602950215,
|
7293 |
+
"grad_norm": 4.303280830383301,
|
7294 |
+
"learning_rate": 1.9360172095881993e-05,
|
7295 |
+
"loss": 1.4278,
|
7296 |
+
"step": 2082
|
7297 |
+
},
|
7298 |
+
{
|
7299 |
+
"epoch": 0.16011063306699447,
|
7300 |
+
"grad_norm": 3.9183764457702637,
|
7301 |
+
"learning_rate": 1.9359557467732023e-05,
|
7302 |
+
"loss": 1.4808,
|
7303 |
+
"step": 2084
|
7304 |
+
},
|
7305 |
+
{
|
7306 |
+
"epoch": 0.16026429010448678,
|
7307 |
+
"grad_norm": 4.484659671783447,
|
7308 |
+
"learning_rate": 1.9358942839582053e-05,
|
7309 |
+
"loss": 1.557,
|
7310 |
+
"step": 2086
|
7311 |
+
},
|
7312 |
+
{
|
7313 |
+
"epoch": 0.1604179471419791,
|
7314 |
+
"grad_norm": 4.600409984588623,
|
7315 |
+
"learning_rate": 1.9358328211432086e-05,
|
7316 |
+
"loss": 1.5848,
|
7317 |
+
"step": 2088
|
7318 |
+
},
|
7319 |
+
{
|
7320 |
+
"epoch": 0.16057160417947142,
|
7321 |
+
"grad_norm": 3.4427108764648438,
|
7322 |
+
"learning_rate": 1.9357713583282115e-05,
|
7323 |
+
"loss": 1.5589,
|
7324 |
+
"step": 2090
|
7325 |
+
},
|
7326 |
+
{
|
7327 |
+
"epoch": 0.16072526121696373,
|
7328 |
+
"grad_norm": 7.8328375816345215,
|
7329 |
+
"learning_rate": 1.935709895513215e-05,
|
7330 |
+
"loss": 1.5519,
|
7331 |
+
"step": 2092
|
7332 |
+
},
|
7333 |
+
{
|
7334 |
+
"epoch": 0.16087891825445605,
|
7335 |
+
"grad_norm": 4.630655765533447,
|
7336 |
+
"learning_rate": 1.9356484326982178e-05,
|
7337 |
+
"loss": 1.6441,
|
7338 |
+
"step": 2094
|
7339 |
+
},
|
7340 |
+
{
|
7341 |
+
"epoch": 0.16103257529194837,
|
7342 |
+
"grad_norm": 4.217165946960449,
|
7343 |
+
"learning_rate": 1.9355869698832208e-05,
|
7344 |
+
"loss": 1.5559,
|
7345 |
+
"step": 2096
|
7346 |
+
},
|
7347 |
+
{
|
7348 |
+
"epoch": 0.16118623232944068,
|
7349 |
+
"grad_norm": 4.941442012786865,
|
7350 |
+
"learning_rate": 1.935525507068224e-05,
|
7351 |
+
"loss": 1.5065,
|
7352 |
+
"step": 2098
|
7353 |
+
},
|
7354 |
+
{
|
7355 |
+
"epoch": 0.161339889366933,
|
7356 |
+
"grad_norm": 4.171699523925781,
|
7357 |
+
"learning_rate": 1.9354640442532267e-05,
|
7358 |
+
"loss": 1.4993,
|
7359 |
+
"step": 2100
|
7360 |
+
},
|
7361 |
+
{
|
7362 |
+
"epoch": 0.16149354640442531,
|
7363 |
+
"grad_norm": 3.8235647678375244,
|
7364 |
+
"learning_rate": 1.93540258143823e-05,
|
7365 |
+
"loss": 1.6035,
|
7366 |
+
"step": 2102
|
7367 |
+
},
|
7368 |
+
{
|
7369 |
+
"epoch": 0.16164720344191763,
|
7370 |
+
"grad_norm": 5.202590465545654,
|
7371 |
+
"learning_rate": 1.935341118623233e-05,
|
7372 |
+
"loss": 1.6397,
|
7373 |
+
"step": 2104
|
7374 |
+
},
|
7375 |
+
{
|
7376 |
+
"epoch": 0.16180086047940995,
|
7377 |
+
"grad_norm": 3.8114516735076904,
|
7378 |
+
"learning_rate": 1.935279655808236e-05,
|
7379 |
+
"loss": 1.5103,
|
7380 |
+
"step": 2106
|
7381 |
+
},
|
7382 |
+
{
|
7383 |
+
"epoch": 0.16195451751690226,
|
7384 |
+
"grad_norm": 4.666793346405029,
|
7385 |
+
"learning_rate": 1.9352181929932393e-05,
|
7386 |
+
"loss": 1.5363,
|
7387 |
+
"step": 2108
|
7388 |
+
},
|
7389 |
+
{
|
7390 |
+
"epoch": 0.16210817455439458,
|
7391 |
+
"grad_norm": 4.18889856338501,
|
7392 |
+
"learning_rate": 1.9351567301782422e-05,
|
7393 |
+
"loss": 1.662,
|
7394 |
+
"step": 2110
|
7395 |
+
},
|
7396 |
+
{
|
7397 |
+
"epoch": 0.1622618315918869,
|
7398 |
+
"grad_norm": 4.392826557159424,
|
7399 |
+
"learning_rate": 1.9350952673632455e-05,
|
7400 |
+
"loss": 1.5492,
|
7401 |
+
"step": 2112
|
7402 |
+
},
|
7403 |
+
{
|
7404 |
+
"epoch": 0.1624154886293792,
|
7405 |
+
"grad_norm": 4.0653839111328125,
|
7406 |
+
"learning_rate": 1.9350338045482485e-05,
|
7407 |
+
"loss": 1.7512,
|
7408 |
+
"step": 2114
|
7409 |
+
},
|
7410 |
+
{
|
7411 |
+
"epoch": 0.16256914566687156,
|
7412 |
+
"grad_norm": 4.020386219024658,
|
7413 |
+
"learning_rate": 1.9349723417332515e-05,
|
7414 |
+
"loss": 1.7604,
|
7415 |
+
"step": 2116
|
7416 |
+
},
|
7417 |
+
{
|
7418 |
+
"epoch": 0.16272280270436387,
|
7419 |
+
"grad_norm": 4.062155723571777,
|
7420 |
+
"learning_rate": 1.9349108789182548e-05,
|
7421 |
+
"loss": 1.7181,
|
7422 |
+
"step": 2118
|
7423 |
+
},
|
7424 |
+
{
|
7425 |
+
"epoch": 0.1628764597418562,
|
7426 |
+
"grad_norm": 4.4019999504089355,
|
7427 |
+
"learning_rate": 1.9348494161032578e-05,
|
7428 |
+
"loss": 1.639,
|
7429 |
+
"step": 2120
|
7430 |
+
},
|
7431 |
+
{
|
7432 |
+
"epoch": 0.1630301167793485,
|
7433 |
+
"grad_norm": 3.761319875717163,
|
7434 |
+
"learning_rate": 1.9347879532882607e-05,
|
7435 |
+
"loss": 1.4084,
|
7436 |
+
"step": 2122
|
7437 |
+
},
|
7438 |
+
{
|
7439 |
+
"epoch": 0.16318377381684082,
|
7440 |
+
"grad_norm": 4.566369533538818,
|
7441 |
+
"learning_rate": 1.934726490473264e-05,
|
7442 |
+
"loss": 1.5193,
|
7443 |
+
"step": 2124
|
7444 |
+
},
|
7445 |
+
{
|
7446 |
+
"epoch": 0.16333743085433314,
|
7447 |
+
"grad_norm": 4.366701602935791,
|
7448 |
+
"learning_rate": 1.9346650276582667e-05,
|
7449 |
+
"loss": 1.5211,
|
7450 |
+
"step": 2126
|
7451 |
+
},
|
7452 |
+
{
|
7453 |
+
"epoch": 0.16349108789182545,
|
7454 |
+
"grad_norm": 4.0437116622924805,
|
7455 |
+
"learning_rate": 1.93460356484327e-05,
|
7456 |
+
"loss": 1.5901,
|
7457 |
+
"step": 2128
|
7458 |
+
},
|
7459 |
+
{
|
7460 |
+
"epoch": 0.16364474492931777,
|
7461 |
+
"grad_norm": 3.9914474487304688,
|
7462 |
+
"learning_rate": 1.934542102028273e-05,
|
7463 |
+
"loss": 1.4975,
|
7464 |
+
"step": 2130
|
7465 |
+
},
|
7466 |
+
{
|
7467 |
+
"epoch": 0.1637984019668101,
|
7468 |
+
"grad_norm": 4.267744541168213,
|
7469 |
+
"learning_rate": 1.9344806392132762e-05,
|
7470 |
+
"loss": 1.6574,
|
7471 |
+
"step": 2132
|
7472 |
+
},
|
7473 |
+
{
|
7474 |
+
"epoch": 0.1639520590043024,
|
7475 |
+
"grad_norm": 3.843414068222046,
|
7476 |
+
"learning_rate": 1.9344191763982792e-05,
|
7477 |
+
"loss": 1.4976,
|
7478 |
+
"step": 2134
|
7479 |
+
},
|
7480 |
+
{
|
7481 |
+
"epoch": 0.16410571604179472,
|
7482 |
+
"grad_norm": 4.3155975341796875,
|
7483 |
+
"learning_rate": 1.9343577135832822e-05,
|
7484 |
+
"loss": 1.5333,
|
7485 |
+
"step": 2136
|
7486 |
+
},
|
7487 |
+
{
|
7488 |
+
"epoch": 0.16425937307928704,
|
7489 |
+
"grad_norm": 4.159292697906494,
|
7490 |
+
"learning_rate": 1.9342962507682855e-05,
|
7491 |
+
"loss": 1.5053,
|
7492 |
+
"step": 2138
|
7493 |
+
},
|
7494 |
+
{
|
7495 |
+
"epoch": 0.16441303011677935,
|
7496 |
+
"grad_norm": 4.296607971191406,
|
7497 |
+
"learning_rate": 1.9342347879532885e-05,
|
7498 |
+
"loss": 1.6112,
|
7499 |
+
"step": 2140
|
7500 |
+
},
|
7501 |
+
{
|
7502 |
+
"epoch": 0.16456668715427167,
|
7503 |
+
"grad_norm": 3.9782469272613525,
|
7504 |
+
"learning_rate": 1.9341733251382914e-05,
|
7505 |
+
"loss": 1.5241,
|
7506 |
+
"step": 2142
|
7507 |
+
},
|
7508 |
+
{
|
7509 |
+
"epoch": 0.16472034419176398,
|
7510 |
+
"grad_norm": 4.183950901031494,
|
7511 |
+
"learning_rate": 1.9341118623232947e-05,
|
7512 |
+
"loss": 1.5901,
|
7513 |
+
"step": 2144
|
7514 |
+
},
|
7515 |
+
{
|
7516 |
+
"epoch": 0.1648740012292563,
|
7517 |
+
"grad_norm": 3.9188575744628906,
|
7518 |
+
"learning_rate": 1.9340503995082977e-05,
|
7519 |
+
"loss": 1.621,
|
7520 |
+
"step": 2146
|
7521 |
+
},
|
7522 |
+
{
|
7523 |
+
"epoch": 0.16502765826674862,
|
7524 |
+
"grad_norm": 4.474676132202148,
|
7525 |
+
"learning_rate": 1.9339889366933007e-05,
|
7526 |
+
"loss": 1.487,
|
7527 |
+
"step": 2148
|
7528 |
+
},
|
7529 |
+
{
|
7530 |
+
"epoch": 0.16518131530424093,
|
7531 |
+
"grad_norm": 4.6410017013549805,
|
7532 |
+
"learning_rate": 1.933927473878304e-05,
|
7533 |
+
"loss": 1.6997,
|
7534 |
+
"step": 2150
|
7535 |
+
},
|
7536 |
+
{
|
7537 |
+
"epoch": 0.16533497234173325,
|
7538 |
+
"grad_norm": 10.933167457580566,
|
7539 |
+
"learning_rate": 1.933866011063307e-05,
|
7540 |
+
"loss": 1.4752,
|
7541 |
+
"step": 2152
|
7542 |
+
},
|
7543 |
+
{
|
7544 |
+
"epoch": 0.16548862937922557,
|
7545 |
+
"grad_norm": 4.885360240936279,
|
7546 |
+
"learning_rate": 1.93380454824831e-05,
|
7547 |
+
"loss": 1.5604,
|
7548 |
+
"step": 2154
|
7549 |
+
},
|
7550 |
+
{
|
7551 |
+
"epoch": 0.16564228641671788,
|
7552 |
+
"grad_norm": 3.9420063495635986,
|
7553 |
+
"learning_rate": 1.933743085433313e-05,
|
7554 |
+
"loss": 1.5909,
|
7555 |
+
"step": 2156
|
7556 |
+
},
|
7557 |
+
{
|
7558 |
+
"epoch": 0.1657959434542102,
|
7559 |
+
"grad_norm": 3.5523521900177,
|
7560 |
+
"learning_rate": 1.9336816226183162e-05,
|
7561 |
+
"loss": 1.6274,
|
7562 |
+
"step": 2158
|
7563 |
+
},
|
7564 |
+
{
|
7565 |
+
"epoch": 0.16594960049170251,
|
7566 |
+
"grad_norm": 3.6621317863464355,
|
7567 |
+
"learning_rate": 1.933620159803319e-05,
|
7568 |
+
"loss": 1.5111,
|
7569 |
+
"step": 2160
|
7570 |
+
},
|
7571 |
+
{
|
7572 |
+
"epoch": 0.16610325752919483,
|
7573 |
+
"grad_norm": 4.57207727432251,
|
7574 |
+
"learning_rate": 1.933558696988322e-05,
|
7575 |
+
"loss": 1.6702,
|
7576 |
+
"step": 2162
|
7577 |
+
},
|
7578 |
+
{
|
7579 |
+
"epoch": 0.16625691456668715,
|
7580 |
+
"grad_norm": 3.9489428997039795,
|
7581 |
+
"learning_rate": 1.9334972341733254e-05,
|
7582 |
+
"loss": 1.589,
|
7583 |
+
"step": 2164
|
7584 |
+
},
|
7585 |
+
{
|
7586 |
+
"epoch": 0.16641057160417946,
|
7587 |
+
"grad_norm": 3.8509654998779297,
|
7588 |
+
"learning_rate": 1.9334357713583284e-05,
|
7589 |
+
"loss": 1.5522,
|
7590 |
+
"step": 2166
|
7591 |
+
},
|
7592 |
+
{
|
7593 |
+
"epoch": 0.16656422864167178,
|
7594 |
+
"grad_norm": 3.6893975734710693,
|
7595 |
+
"learning_rate": 1.9333743085433314e-05,
|
7596 |
+
"loss": 1.4767,
|
7597 |
+
"step": 2168
|
7598 |
+
},
|
7599 |
+
{
|
7600 |
+
"epoch": 0.1667178856791641,
|
7601 |
+
"grad_norm": 4.064876079559326,
|
7602 |
+
"learning_rate": 1.9333128457283347e-05,
|
7603 |
+
"loss": 1.5515,
|
7604 |
+
"step": 2170
|
7605 |
+
},
|
7606 |
+
{
|
7607 |
+
"epoch": 0.1668715427166564,
|
7608 |
+
"grad_norm": 4.427343845367432,
|
7609 |
+
"learning_rate": 1.9332513829133376e-05,
|
7610 |
+
"loss": 1.7098,
|
7611 |
+
"step": 2172
|
7612 |
+
},
|
7613 |
+
{
|
7614 |
+
"epoch": 0.16702519975414873,
|
7615 |
+
"grad_norm": 3.724740505218506,
|
7616 |
+
"learning_rate": 1.9331899200983406e-05,
|
7617 |
+
"loss": 1.5612,
|
7618 |
+
"step": 2174
|
7619 |
+
},
|
7620 |
+
{
|
7621 |
+
"epoch": 0.16717885679164105,
|
7622 |
+
"grad_norm": 4.302028656005859,
|
7623 |
+
"learning_rate": 1.933128457283344e-05,
|
7624 |
+
"loss": 1.641,
|
7625 |
+
"step": 2176
|
7626 |
+
},
|
7627 |
+
{
|
7628 |
+
"epoch": 0.16733251382913336,
|
7629 |
+
"grad_norm": 4.149264335632324,
|
7630 |
+
"learning_rate": 1.933066994468347e-05,
|
7631 |
+
"loss": 1.6923,
|
7632 |
+
"step": 2178
|
7633 |
+
},
|
7634 |
+
{
|
7635 |
+
"epoch": 0.16748617086662568,
|
7636 |
+
"grad_norm": 4.091360092163086,
|
7637 |
+
"learning_rate": 1.93300553165335e-05,
|
7638 |
+
"loss": 1.4934,
|
7639 |
+
"step": 2180
|
7640 |
+
},
|
7641 |
+
{
|
7642 |
+
"epoch": 0.16763982790411802,
|
7643 |
+
"grad_norm": 4.653087139129639,
|
7644 |
+
"learning_rate": 1.9329440688383528e-05,
|
7645 |
+
"loss": 1.6434,
|
7646 |
+
"step": 2182
|
7647 |
+
},
|
7648 |
+
{
|
7649 |
+
"epoch": 0.16779348494161034,
|
7650 |
+
"grad_norm": 4.141650199890137,
|
7651 |
+
"learning_rate": 1.932882606023356e-05,
|
7652 |
+
"loss": 1.5228,
|
7653 |
+
"step": 2184
|
7654 |
+
},
|
7655 |
+
{
|
7656 |
+
"epoch": 0.16794714197910265,
|
7657 |
+
"grad_norm": 4.257270336151123,
|
7658 |
+
"learning_rate": 1.932821143208359e-05,
|
7659 |
+
"loss": 1.7463,
|
7660 |
+
"step": 2186
|
7661 |
+
},
|
7662 |
+
{
|
7663 |
+
"epoch": 0.16810079901659497,
|
7664 |
+
"grad_norm": 4.1163554191589355,
|
7665 |
+
"learning_rate": 1.932759680393362e-05,
|
7666 |
+
"loss": 1.4543,
|
7667 |
+
"step": 2188
|
7668 |
+
},
|
7669 |
+
{
|
7670 |
+
"epoch": 0.1682544560540873,
|
7671 |
+
"grad_norm": 3.6683640480041504,
|
7672 |
+
"learning_rate": 1.9326982175783654e-05,
|
7673 |
+
"loss": 1.4778,
|
7674 |
+
"step": 2190
|
7675 |
+
},
|
7676 |
+
{
|
7677 |
+
"epoch": 0.1684081130915796,
|
7678 |
+
"grad_norm": 3.734006881713867,
|
7679 |
+
"learning_rate": 1.9326367547633683e-05,
|
7680 |
+
"loss": 1.5738,
|
7681 |
+
"step": 2192
|
7682 |
+
},
|
7683 |
+
{
|
7684 |
+
"epoch": 0.16856177012907192,
|
7685 |
+
"grad_norm": 4.454776287078857,
|
7686 |
+
"learning_rate": 1.9325752919483713e-05,
|
7687 |
+
"loss": 1.6569,
|
7688 |
+
"step": 2194
|
7689 |
+
},
|
7690 |
+
{
|
7691 |
+
"epoch": 0.16871542716656424,
|
7692 |
+
"grad_norm": 4.1497883796691895,
|
7693 |
+
"learning_rate": 1.9325138291333746e-05,
|
7694 |
+
"loss": 1.5172,
|
7695 |
+
"step": 2196
|
7696 |
+
},
|
7697 |
+
{
|
7698 |
+
"epoch": 0.16886908420405655,
|
7699 |
+
"grad_norm": 4.288064479827881,
|
7700 |
+
"learning_rate": 1.9324523663183776e-05,
|
7701 |
+
"loss": 1.6416,
|
7702 |
+
"step": 2198
|
7703 |
+
},
|
7704 |
+
{
|
7705 |
+
"epoch": 0.16902274124154887,
|
7706 |
+
"grad_norm": 3.463115930557251,
|
7707 |
+
"learning_rate": 1.9323909035033806e-05,
|
7708 |
+
"loss": 1.4196,
|
7709 |
+
"step": 2200
|
7710 |
+
},
|
7711 |
+
{
|
7712 |
+
"epoch": 0.16917639827904118,
|
7713 |
+
"grad_norm": 5.139834403991699,
|
7714 |
+
"learning_rate": 1.9323294406883835e-05,
|
7715 |
+
"loss": 1.6058,
|
7716 |
+
"step": 2202
|
7717 |
+
},
|
7718 |
+
{
|
7719 |
+
"epoch": 0.1693300553165335,
|
7720 |
+
"grad_norm": 4.1170806884765625,
|
7721 |
+
"learning_rate": 1.9322679778733868e-05,
|
7722 |
+
"loss": 1.4493,
|
7723 |
+
"step": 2204
|
7724 |
+
},
|
7725 |
+
{
|
7726 |
+
"epoch": 0.16948371235402582,
|
7727 |
+
"grad_norm": 3.8090291023254395,
|
7728 |
+
"learning_rate": 1.9322065150583898e-05,
|
7729 |
+
"loss": 1.6205,
|
7730 |
+
"step": 2206
|
7731 |
+
},
|
7732 |
+
{
|
7733 |
+
"epoch": 0.16963736939151813,
|
7734 |
+
"grad_norm": 3.461530923843384,
|
7735 |
+
"learning_rate": 1.9321450522433928e-05,
|
7736 |
+
"loss": 1.4338,
|
7737 |
+
"step": 2208
|
7738 |
+
},
|
7739 |
+
{
|
7740 |
+
"epoch": 0.16979102642901045,
|
7741 |
+
"grad_norm": 4.355661392211914,
|
7742 |
+
"learning_rate": 1.932083589428396e-05,
|
7743 |
+
"loss": 1.6044,
|
7744 |
+
"step": 2210
|
7745 |
+
},
|
7746 |
+
{
|
7747 |
+
"epoch": 0.16994468346650277,
|
7748 |
+
"grad_norm": 3.4141671657562256,
|
7749 |
+
"learning_rate": 1.932022126613399e-05,
|
7750 |
+
"loss": 1.5096,
|
7751 |
+
"step": 2212
|
7752 |
+
},
|
7753 |
+
{
|
7754 |
+
"epoch": 0.17009834050399508,
|
7755 |
+
"grad_norm": 4.202045917510986,
|
7756 |
+
"learning_rate": 1.931960663798402e-05,
|
7757 |
+
"loss": 1.7529,
|
7758 |
+
"step": 2214
|
7759 |
+
},
|
7760 |
+
{
|
7761 |
+
"epoch": 0.1702519975414874,
|
7762 |
+
"grad_norm": 3.8714635372161865,
|
7763 |
+
"learning_rate": 1.9318992009834053e-05,
|
7764 |
+
"loss": 1.5085,
|
7765 |
+
"step": 2216
|
7766 |
+
},
|
7767 |
+
{
|
7768 |
+
"epoch": 0.17040565457897972,
|
7769 |
+
"grad_norm": 5.047643184661865,
|
7770 |
+
"learning_rate": 1.9318377381684083e-05,
|
7771 |
+
"loss": 1.6937,
|
7772 |
+
"step": 2218
|
7773 |
+
},
|
7774 |
+
{
|
7775 |
+
"epoch": 0.17055931161647203,
|
7776 |
+
"grad_norm": 4.094550132751465,
|
7777 |
+
"learning_rate": 1.9317762753534113e-05,
|
7778 |
+
"loss": 1.7053,
|
7779 |
+
"step": 2220
|
7780 |
+
},
|
7781 |
+
{
|
7782 |
+
"epoch": 0.17071296865396435,
|
7783 |
+
"grad_norm": 3.65474796295166,
|
7784 |
+
"learning_rate": 1.9317148125384146e-05,
|
7785 |
+
"loss": 1.5531,
|
7786 |
+
"step": 2222
|
7787 |
+
},
|
7788 |
+
{
|
7789 |
+
"epoch": 0.17086662569145666,
|
7790 |
+
"grad_norm": 4.38794469833374,
|
7791 |
+
"learning_rate": 1.9316533497234175e-05,
|
7792 |
+
"loss": 1.7151,
|
7793 |
+
"step": 2224
|
7794 |
+
},
|
7795 |
+
{
|
7796 |
+
"epoch": 0.17102028272894898,
|
7797 |
+
"grad_norm": 4.336912631988525,
|
7798 |
+
"learning_rate": 1.9315918869084205e-05,
|
7799 |
+
"loss": 1.4871,
|
7800 |
+
"step": 2226
|
7801 |
+
},
|
7802 |
+
{
|
7803 |
+
"epoch": 0.1711739397664413,
|
7804 |
+
"grad_norm": 3.9137933254241943,
|
7805 |
+
"learning_rate": 1.9315304240934235e-05,
|
7806 |
+
"loss": 1.3785,
|
7807 |
+
"step": 2228
|
7808 |
+
},
|
7809 |
+
{
|
7810 |
+
"epoch": 0.1713275968039336,
|
7811 |
+
"grad_norm": 4.241963863372803,
|
7812 |
+
"learning_rate": 1.9314689612784268e-05,
|
7813 |
+
"loss": 1.482,
|
7814 |
+
"step": 2230
|
7815 |
+
},
|
7816 |
+
{
|
7817 |
+
"epoch": 0.17148125384142593,
|
7818 |
+
"grad_norm": 4.047958850860596,
|
7819 |
+
"learning_rate": 1.9314074984634297e-05,
|
7820 |
+
"loss": 1.7098,
|
7821 |
+
"step": 2232
|
7822 |
+
},
|
7823 |
+
{
|
7824 |
+
"epoch": 0.17163491087891825,
|
7825 |
+
"grad_norm": 4.688525676727295,
|
7826 |
+
"learning_rate": 1.9313460356484327e-05,
|
7827 |
+
"loss": 1.5618,
|
7828 |
+
"step": 2234
|
7829 |
+
},
|
7830 |
+
{
|
7831 |
+
"epoch": 0.17178856791641056,
|
7832 |
+
"grad_norm": 4.020751953125,
|
7833 |
+
"learning_rate": 1.931284572833436e-05,
|
7834 |
+
"loss": 1.5526,
|
7835 |
+
"step": 2236
|
7836 |
+
},
|
7837 |
+
{
|
7838 |
+
"epoch": 0.17194222495390288,
|
7839 |
+
"grad_norm": 3.93445086479187,
|
7840 |
+
"learning_rate": 1.931223110018439e-05,
|
7841 |
+
"loss": 1.6595,
|
7842 |
+
"step": 2238
|
7843 |
+
},
|
7844 |
+
{
|
7845 |
+
"epoch": 0.1720958819913952,
|
7846 |
+
"grad_norm": 5.324620723724365,
|
7847 |
+
"learning_rate": 1.931161647203442e-05,
|
7848 |
+
"loss": 1.657,
|
7849 |
+
"step": 2240
|
7850 |
+
},
|
7851 |
+
{
|
7852 |
+
"epoch": 0.1722495390288875,
|
7853 |
+
"grad_norm": 3.6193902492523193,
|
7854 |
+
"learning_rate": 1.9311001843884453e-05,
|
7855 |
+
"loss": 1.5,
|
7856 |
+
"step": 2242
|
7857 |
+
},
|
7858 |
+
{
|
7859 |
+
"epoch": 0.17240319606637983,
|
7860 |
+
"grad_norm": 4.382716178894043,
|
7861 |
+
"learning_rate": 1.9310387215734482e-05,
|
7862 |
+
"loss": 1.6785,
|
7863 |
+
"step": 2244
|
7864 |
+
},
|
7865 |
+
{
|
7866 |
+
"epoch": 0.17255685310387217,
|
7867 |
+
"grad_norm": 3.7359304428100586,
|
7868 |
+
"learning_rate": 1.9309772587584512e-05,
|
7869 |
+
"loss": 1.5003,
|
7870 |
+
"step": 2246
|
7871 |
+
},
|
7872 |
+
{
|
7873 |
+
"epoch": 0.1727105101413645,
|
7874 |
+
"grad_norm": 4.570140838623047,
|
7875 |
+
"learning_rate": 1.9309157959434545e-05,
|
7876 |
+
"loss": 1.6353,
|
7877 |
+
"step": 2248
|
7878 |
+
},
|
7879 |
+
{
|
7880 |
+
"epoch": 0.1728641671788568,
|
7881 |
+
"grad_norm": 4.809631824493408,
|
7882 |
+
"learning_rate": 1.9308543331284575e-05,
|
7883 |
+
"loss": 1.5898,
|
7884 |
+
"step": 2250
|
7885 |
+
},
|
7886 |
+
{
|
7887 |
+
"epoch": 0.17301782421634912,
|
7888 |
+
"grad_norm": 4.994627475738525,
|
7889 |
+
"learning_rate": 1.9307928703134604e-05,
|
7890 |
+
"loss": 1.6614,
|
7891 |
+
"step": 2252
|
7892 |
+
},
|
7893 |
+
{
|
7894 |
+
"epoch": 0.17317148125384144,
|
7895 |
+
"grad_norm": 4.121060371398926,
|
7896 |
+
"learning_rate": 1.9307314074984634e-05,
|
7897 |
+
"loss": 1.6817,
|
7898 |
+
"step": 2254
|
7899 |
+
},
|
7900 |
+
{
|
7901 |
+
"epoch": 0.17332513829133375,
|
7902 |
+
"grad_norm": 4.009014129638672,
|
7903 |
+
"learning_rate": 1.9306699446834667e-05,
|
7904 |
+
"loss": 1.5645,
|
7905 |
+
"step": 2256
|
7906 |
+
},
|
7907 |
+
{
|
7908 |
+
"epoch": 0.17347879532882607,
|
7909 |
+
"grad_norm": 4.27223539352417,
|
7910 |
+
"learning_rate": 1.9306084818684697e-05,
|
7911 |
+
"loss": 1.6764,
|
7912 |
+
"step": 2258
|
7913 |
+
},
|
7914 |
+
{
|
7915 |
+
"epoch": 0.17363245236631838,
|
7916 |
+
"grad_norm": 4.074213027954102,
|
7917 |
+
"learning_rate": 1.9305470190534727e-05,
|
7918 |
+
"loss": 1.6095,
|
7919 |
+
"step": 2260
|
7920 |
+
},
|
7921 |
+
{
|
7922 |
+
"epoch": 0.1737861094038107,
|
7923 |
+
"grad_norm": 3.6030173301696777,
|
7924 |
+
"learning_rate": 1.930485556238476e-05,
|
7925 |
+
"loss": 1.5907,
|
7926 |
+
"step": 2262
|
7927 |
+
},
|
7928 |
+
{
|
7929 |
+
"epoch": 0.17393976644130302,
|
7930 |
+
"grad_norm": 4.34961462020874,
|
7931 |
+
"learning_rate": 1.930424093423479e-05,
|
7932 |
+
"loss": 1.611,
|
7933 |
+
"step": 2264
|
7934 |
+
},
|
7935 |
+
{
|
7936 |
+
"epoch": 0.17409342347879533,
|
7937 |
+
"grad_norm": 3.9723429679870605,
|
7938 |
+
"learning_rate": 1.930362630608482e-05,
|
7939 |
+
"loss": 1.5256,
|
7940 |
+
"step": 2266
|
7941 |
+
},
|
7942 |
+
{
|
7943 |
+
"epoch": 0.17424708051628765,
|
7944 |
+
"grad_norm": 3.7899746894836426,
|
7945 |
+
"learning_rate": 1.9303011677934852e-05,
|
7946 |
+
"loss": 1.6923,
|
7947 |
+
"step": 2268
|
7948 |
+
},
|
7949 |
+
{
|
7950 |
+
"epoch": 0.17440073755377997,
|
7951 |
+
"grad_norm": 4.415828227996826,
|
7952 |
+
"learning_rate": 1.9302397049784882e-05,
|
7953 |
+
"loss": 1.4681,
|
7954 |
+
"step": 2270
|
7955 |
+
},
|
7956 |
+
{
|
7957 |
+
"epoch": 0.17455439459127228,
|
7958 |
+
"grad_norm": 5.616640567779541,
|
7959 |
+
"learning_rate": 1.930178242163491e-05,
|
7960 |
+
"loss": 1.6252,
|
7961 |
+
"step": 2272
|
7962 |
+
},
|
7963 |
+
{
|
7964 |
+
"epoch": 0.1747080516287646,
|
7965 |
+
"grad_norm": 3.5017950534820557,
|
7966 |
+
"learning_rate": 1.9301167793484944e-05,
|
7967 |
+
"loss": 1.4621,
|
7968 |
+
"step": 2274
|
7969 |
+
},
|
7970 |
+
{
|
7971 |
+
"epoch": 0.17486170866625692,
|
7972 |
+
"grad_norm": 4.120169639587402,
|
7973 |
+
"learning_rate": 1.9300553165334974e-05,
|
7974 |
+
"loss": 1.5537,
|
7975 |
+
"step": 2276
|
7976 |
+
},
|
7977 |
+
{
|
7978 |
+
"epoch": 0.17501536570374923,
|
7979 |
+
"grad_norm": 4.489522457122803,
|
7980 |
+
"learning_rate": 1.9299938537185007e-05,
|
7981 |
+
"loss": 1.6915,
|
7982 |
+
"step": 2278
|
7983 |
+
},
|
7984 |
+
{
|
7985 |
+
"epoch": 0.17516902274124155,
|
7986 |
+
"grad_norm": 4.285830974578857,
|
7987 |
+
"learning_rate": 1.9299323909035034e-05,
|
7988 |
+
"loss": 1.5101,
|
7989 |
+
"step": 2280
|
7990 |
+
},
|
7991 |
+
{
|
7992 |
+
"epoch": 0.17532267977873386,
|
7993 |
+
"grad_norm": 3.9038150310516357,
|
7994 |
+
"learning_rate": 1.9298709280885067e-05,
|
7995 |
+
"loss": 1.6067,
|
7996 |
+
"step": 2282
|
7997 |
+
},
|
7998 |
+
{
|
7999 |
+
"epoch": 0.17547633681622618,
|
8000 |
+
"grad_norm": 3.8521156311035156,
|
8001 |
+
"learning_rate": 1.9298094652735096e-05,
|
8002 |
+
"loss": 1.4494,
|
8003 |
+
"step": 2284
|
8004 |
+
},
|
8005 |
+
{
|
8006 |
+
"epoch": 0.1756299938537185,
|
8007 |
+
"grad_norm": 7.329223155975342,
|
8008 |
+
"learning_rate": 1.9297480024585126e-05,
|
8009 |
+
"loss": 1.5588,
|
8010 |
+
"step": 2286
|
8011 |
+
},
|
8012 |
+
{
|
8013 |
+
"epoch": 0.1757836508912108,
|
8014 |
+
"grad_norm": 3.97939395904541,
|
8015 |
+
"learning_rate": 1.929686539643516e-05,
|
8016 |
+
"loss": 1.4802,
|
8017 |
+
"step": 2288
|
8018 |
+
},
|
8019 |
+
{
|
8020 |
+
"epoch": 0.17593730792870313,
|
8021 |
+
"grad_norm": 3.464115858078003,
|
8022 |
+
"learning_rate": 1.929625076828519e-05,
|
8023 |
+
"loss": 1.4913,
|
8024 |
+
"step": 2290
|
8025 |
+
},
|
8026 |
+
{
|
8027 |
+
"epoch": 0.17609096496619545,
|
8028 |
+
"grad_norm": 4.677506446838379,
|
8029 |
+
"learning_rate": 1.929563614013522e-05,
|
8030 |
+
"loss": 1.5254,
|
8031 |
+
"step": 2292
|
8032 |
+
},
|
8033 |
+
{
|
8034 |
+
"epoch": 0.17624462200368776,
|
8035 |
+
"grad_norm": 3.7886929512023926,
|
8036 |
+
"learning_rate": 1.929502151198525e-05,
|
8037 |
+
"loss": 1.6297,
|
8038 |
+
"step": 2294
|
8039 |
+
},
|
8040 |
+
{
|
8041 |
+
"epoch": 0.17639827904118008,
|
8042 |
+
"grad_norm": 3.5035488605499268,
|
8043 |
+
"learning_rate": 1.929440688383528e-05,
|
8044 |
+
"loss": 1.657,
|
8045 |
+
"step": 2296
|
8046 |
+
},
|
8047 |
+
{
|
8048 |
+
"epoch": 0.1765519360786724,
|
8049 |
+
"grad_norm": 4.172173976898193,
|
8050 |
+
"learning_rate": 1.9293792255685314e-05,
|
8051 |
+
"loss": 1.5612,
|
8052 |
+
"step": 2298
|
8053 |
+
},
|
8054 |
+
{
|
8055 |
+
"epoch": 0.1767055931161647,
|
8056 |
+
"grad_norm": 3.9481425285339355,
|
8057 |
+
"learning_rate": 1.929317762753534e-05,
|
8058 |
+
"loss": 1.4419,
|
8059 |
+
"step": 2300
|
8060 |
+
},
|
8061 |
+
{
|
8062 |
+
"epoch": 0.17685925015365703,
|
8063 |
+
"grad_norm": 3.922159433364868,
|
8064 |
+
"learning_rate": 1.9292562999385374e-05,
|
8065 |
+
"loss": 1.5459,
|
8066 |
+
"step": 2302
|
8067 |
+
},
|
8068 |
+
{
|
8069 |
+
"epoch": 0.17701290719114934,
|
8070 |
+
"grad_norm": 4.2247233390808105,
|
8071 |
+
"learning_rate": 1.9291948371235403e-05,
|
8072 |
+
"loss": 1.6869,
|
8073 |
+
"step": 2304
|
8074 |
+
},
|
8075 |
+
{
|
8076 |
+
"epoch": 0.17716656422864166,
|
8077 |
+
"grad_norm": 4.960201740264893,
|
8078 |
+
"learning_rate": 1.9291333743085433e-05,
|
8079 |
+
"loss": 1.4055,
|
8080 |
+
"step": 2306
|
8081 |
+
},
|
8082 |
+
{
|
8083 |
+
"epoch": 0.17732022126613398,
|
8084 |
+
"grad_norm": 4.675178527832031,
|
8085 |
+
"learning_rate": 1.9290719114935466e-05,
|
8086 |
+
"loss": 1.4635,
|
8087 |
+
"step": 2308
|
8088 |
+
},
|
8089 |
+
{
|
8090 |
+
"epoch": 0.1774738783036263,
|
8091 |
+
"grad_norm": 4.3724589347839355,
|
8092 |
+
"learning_rate": 1.9290104486785496e-05,
|
8093 |
+
"loss": 1.4423,
|
8094 |
+
"step": 2310
|
8095 |
+
},
|
8096 |
+
{
|
8097 |
+
"epoch": 0.17762753534111864,
|
8098 |
+
"grad_norm": 4.629543304443359,
|
8099 |
+
"learning_rate": 1.9289489858635525e-05,
|
8100 |
+
"loss": 1.6696,
|
8101 |
+
"step": 2312
|
8102 |
+
},
|
8103 |
+
{
|
8104 |
+
"epoch": 0.17778119237861095,
|
8105 |
+
"grad_norm": 3.8183395862579346,
|
8106 |
+
"learning_rate": 1.928887523048556e-05,
|
8107 |
+
"loss": 1.5558,
|
8108 |
+
"step": 2314
|
8109 |
+
},
|
8110 |
+
{
|
8111 |
+
"epoch": 0.17793484941610327,
|
8112 |
+
"grad_norm": 3.7984275817871094,
|
8113 |
+
"learning_rate": 1.9288260602335588e-05,
|
8114 |
+
"loss": 1.5884,
|
8115 |
+
"step": 2316
|
8116 |
+
},
|
8117 |
+
{
|
8118 |
+
"epoch": 0.17808850645359559,
|
8119 |
+
"grad_norm": 3.9068145751953125,
|
8120 |
+
"learning_rate": 1.928764597418562e-05,
|
8121 |
+
"loss": 1.6217,
|
8122 |
+
"step": 2318
|
8123 |
+
},
|
8124 |
+
{
|
8125 |
+
"epoch": 0.1782421634910879,
|
8126 |
+
"grad_norm": 4.159458160400391,
|
8127 |
+
"learning_rate": 1.928703134603565e-05,
|
8128 |
+
"loss": 1.5934,
|
8129 |
+
"step": 2320
|
8130 |
+
},
|
8131 |
+
{
|
8132 |
+
"epoch": 0.17839582052858022,
|
8133 |
+
"grad_norm": 4.013321876525879,
|
8134 |
+
"learning_rate": 1.928641671788568e-05,
|
8135 |
+
"loss": 1.5542,
|
8136 |
+
"step": 2322
|
8137 |
+
},
|
8138 |
+
{
|
8139 |
+
"epoch": 0.17854947756607253,
|
8140 |
+
"grad_norm": 4.504942893981934,
|
8141 |
+
"learning_rate": 1.9285802089735714e-05,
|
8142 |
+
"loss": 1.7811,
|
8143 |
+
"step": 2324
|
8144 |
+
},
|
8145 |
+
{
|
8146 |
+
"epoch": 0.17870313460356485,
|
8147 |
+
"grad_norm": 4.721269130706787,
|
8148 |
+
"learning_rate": 1.928518746158574e-05,
|
8149 |
+
"loss": 1.4974,
|
8150 |
+
"step": 2326
|
8151 |
+
},
|
8152 |
+
{
|
8153 |
+
"epoch": 0.17885679164105717,
|
8154 |
+
"grad_norm": 3.662440776824951,
|
8155 |
+
"learning_rate": 1.9284572833435773e-05,
|
8156 |
+
"loss": 1.6497,
|
8157 |
+
"step": 2328
|
8158 |
+
},
|
8159 |
+
{
|
8160 |
+
"epoch": 0.17901044867854948,
|
8161 |
+
"grad_norm": 3.8075759410858154,
|
8162 |
+
"learning_rate": 1.9283958205285803e-05,
|
8163 |
+
"loss": 1.4787,
|
8164 |
+
"step": 2330
|
8165 |
+
},
|
8166 |
+
{
|
8167 |
+
"epoch": 0.1791641057160418,
|
8168 |
+
"grad_norm": 4.013290882110596,
|
8169 |
+
"learning_rate": 1.9283343577135832e-05,
|
8170 |
+
"loss": 1.5371,
|
8171 |
+
"step": 2332
|
8172 |
+
},
|
8173 |
+
{
|
8174 |
+
"epoch": 0.17931776275353412,
|
8175 |
+
"grad_norm": 4.095331192016602,
|
8176 |
+
"learning_rate": 1.9282728948985865e-05,
|
8177 |
+
"loss": 1.6089,
|
8178 |
+
"step": 2334
|
8179 |
+
},
|
8180 |
+
{
|
8181 |
+
"epoch": 0.17947141979102643,
|
8182 |
+
"grad_norm": 4.137665748596191,
|
8183 |
+
"learning_rate": 1.9282114320835895e-05,
|
8184 |
+
"loss": 1.6971,
|
8185 |
+
"step": 2336
|
8186 |
+
},
|
8187 |
+
{
|
8188 |
+
"epoch": 0.17962507682851875,
|
8189 |
+
"grad_norm": 4.847195625305176,
|
8190 |
+
"learning_rate": 1.9281499692685928e-05,
|
8191 |
+
"loss": 1.365,
|
8192 |
+
"step": 2338
|
8193 |
+
},
|
8194 |
+
{
|
8195 |
+
"epoch": 0.17977873386601106,
|
8196 |
+
"grad_norm": 4.068114280700684,
|
8197 |
+
"learning_rate": 1.9280885064535958e-05,
|
8198 |
+
"loss": 1.7029,
|
8199 |
+
"step": 2340
|
8200 |
+
},
|
8201 |
+
{
|
8202 |
+
"epoch": 0.17993239090350338,
|
8203 |
+
"grad_norm": 4.104188442230225,
|
8204 |
+
"learning_rate": 1.9280270436385988e-05,
|
8205 |
+
"loss": 1.636,
|
8206 |
+
"step": 2342
|
8207 |
+
},
|
8208 |
+
{
|
8209 |
+
"epoch": 0.1800860479409957,
|
8210 |
+
"grad_norm": 4.033984661102295,
|
8211 |
+
"learning_rate": 1.927965580823602e-05,
|
8212 |
+
"loss": 1.5451,
|
8213 |
+
"step": 2344
|
8214 |
+
},
|
8215 |
+
{
|
8216 |
+
"epoch": 0.180239704978488,
|
8217 |
+
"grad_norm": 4.00771951675415,
|
8218 |
+
"learning_rate": 1.927904118008605e-05,
|
8219 |
+
"loss": 1.5162,
|
8220 |
+
"step": 2346
|
8221 |
+
},
|
8222 |
+
{
|
8223 |
+
"epoch": 0.18039336201598033,
|
8224 |
+
"grad_norm": 4.097219467163086,
|
8225 |
+
"learning_rate": 1.927842655193608e-05,
|
8226 |
+
"loss": 1.6558,
|
8227 |
+
"step": 2348
|
8228 |
+
},
|
8229 |
+
{
|
8230 |
+
"epoch": 0.18054701905347265,
|
8231 |
+
"grad_norm": 4.354104042053223,
|
8232 |
+
"learning_rate": 1.9277811923786113e-05,
|
8233 |
+
"loss": 1.61,
|
8234 |
+
"step": 2350
|
8235 |
+
},
|
8236 |
+
{
|
8237 |
+
"epoch": 0.18070067609096496,
|
8238 |
+
"grad_norm": 4.535645484924316,
|
8239 |
+
"learning_rate": 1.927719729563614e-05,
|
8240 |
+
"loss": 1.6289,
|
8241 |
+
"step": 2352
|
8242 |
+
},
|
8243 |
+
{
|
8244 |
+
"epoch": 0.18085433312845728,
|
8245 |
+
"grad_norm": 4.078785419464111,
|
8246 |
+
"learning_rate": 1.9276582667486172e-05,
|
8247 |
+
"loss": 1.7284,
|
8248 |
+
"step": 2354
|
8249 |
+
},
|
8250 |
+
{
|
8251 |
+
"epoch": 0.1810079901659496,
|
8252 |
+
"grad_norm": 4.275857448577881,
|
8253 |
+
"learning_rate": 1.9275968039336202e-05,
|
8254 |
+
"loss": 1.5461,
|
8255 |
+
"step": 2356
|
8256 |
+
},
|
8257 |
+
{
|
8258 |
+
"epoch": 0.1811616472034419,
|
8259 |
+
"grad_norm": 4.156821250915527,
|
8260 |
+
"learning_rate": 1.9275353411186232e-05,
|
8261 |
+
"loss": 1.5956,
|
8262 |
+
"step": 2358
|
8263 |
+
},
|
8264 |
+
{
|
8265 |
+
"epoch": 0.18131530424093423,
|
8266 |
+
"grad_norm": 3.6036367416381836,
|
8267 |
+
"learning_rate": 1.9274738783036265e-05,
|
8268 |
+
"loss": 1.5949,
|
8269 |
+
"step": 2360
|
8270 |
+
},
|
8271 |
+
{
|
8272 |
+
"epoch": 0.18146896127842654,
|
8273 |
+
"grad_norm": 4.079240798950195,
|
8274 |
+
"learning_rate": 1.9274124154886295e-05,
|
8275 |
+
"loss": 1.5671,
|
8276 |
+
"step": 2362
|
8277 |
+
},
|
8278 |
+
{
|
8279 |
+
"epoch": 0.18162261831591886,
|
8280 |
+
"grad_norm": 4.024125576019287,
|
8281 |
+
"learning_rate": 1.9273509526736328e-05,
|
8282 |
+
"loss": 1.4904,
|
8283 |
+
"step": 2364
|
8284 |
+
},
|
8285 |
+
{
|
8286 |
+
"epoch": 0.18177627535341118,
|
8287 |
+
"grad_norm": 3.7651987075805664,
|
8288 |
+
"learning_rate": 1.9272894898586357e-05,
|
8289 |
+
"loss": 1.5179,
|
8290 |
+
"step": 2366
|
8291 |
+
},
|
8292 |
+
{
|
8293 |
+
"epoch": 0.1819299323909035,
|
8294 |
+
"grad_norm": 3.8718831539154053,
|
8295 |
+
"learning_rate": 1.9272280270436387e-05,
|
8296 |
+
"loss": 1.4699,
|
8297 |
+
"step": 2368
|
8298 |
+
},
|
8299 |
+
{
|
8300 |
+
"epoch": 0.1820835894283958,
|
8301 |
+
"grad_norm": 4.548869609832764,
|
8302 |
+
"learning_rate": 1.927166564228642e-05,
|
8303 |
+
"loss": 1.6476,
|
8304 |
+
"step": 2370
|
8305 |
+
},
|
8306 |
+
{
|
8307 |
+
"epoch": 0.18223724646588813,
|
8308 |
+
"grad_norm": 4.528201580047607,
|
8309 |
+
"learning_rate": 1.927105101413645e-05,
|
8310 |
+
"loss": 1.6755,
|
8311 |
+
"step": 2372
|
8312 |
+
},
|
8313 |
+
{
|
8314 |
+
"epoch": 0.18239090350338044,
|
8315 |
+
"grad_norm": 4.0802388191223145,
|
8316 |
+
"learning_rate": 1.927043638598648e-05,
|
8317 |
+
"loss": 1.5722,
|
8318 |
+
"step": 2374
|
8319 |
+
},
|
8320 |
+
{
|
8321 |
+
"epoch": 0.18254456054087279,
|
8322 |
+
"grad_norm": 4.145775318145752,
|
8323 |
+
"learning_rate": 1.9269821757836513e-05,
|
8324 |
+
"loss": 1.5816,
|
8325 |
+
"step": 2376
|
8326 |
+
},
|
8327 |
+
{
|
8328 |
+
"epoch": 0.1826982175783651,
|
8329 |
+
"grad_norm": 3.925696611404419,
|
8330 |
+
"learning_rate": 1.926920712968654e-05,
|
8331 |
+
"loss": 1.625,
|
8332 |
+
"step": 2378
|
8333 |
+
},
|
8334 |
+
{
|
8335 |
+
"epoch": 0.18285187461585742,
|
8336 |
+
"grad_norm": 3.8499910831451416,
|
8337 |
+
"learning_rate": 1.9268592501536572e-05,
|
8338 |
+
"loss": 1.5522,
|
8339 |
+
"step": 2380
|
8340 |
+
},
|
8341 |
+
{
|
8342 |
+
"epoch": 0.18300553165334973,
|
8343 |
+
"grad_norm": 4.174738883972168,
|
8344 |
+
"learning_rate": 1.92679778733866e-05,
|
8345 |
+
"loss": 1.561,
|
8346 |
+
"step": 2382
|
8347 |
+
},
|
8348 |
+
{
|
8349 |
+
"epoch": 0.18315918869084205,
|
8350 |
+
"grad_norm": 3.801260232925415,
|
8351 |
+
"learning_rate": 1.9267363245236635e-05,
|
8352 |
+
"loss": 1.5159,
|
8353 |
+
"step": 2384
|
8354 |
+
},
|
8355 |
+
{
|
8356 |
+
"epoch": 0.18331284572833437,
|
8357 |
+
"grad_norm": 4.0040202140808105,
|
8358 |
+
"learning_rate": 1.9266748617086664e-05,
|
8359 |
+
"loss": 1.6021,
|
8360 |
+
"step": 2386
|
8361 |
+
},
|
8362 |
+
{
|
8363 |
+
"epoch": 0.18346650276582668,
|
8364 |
+
"grad_norm": 4.132852554321289,
|
8365 |
+
"learning_rate": 1.9266133988936694e-05,
|
8366 |
+
"loss": 1.6887,
|
8367 |
+
"step": 2388
|
8368 |
+
},
|
8369 |
+
{
|
8370 |
+
"epoch": 0.183620159803319,
|
8371 |
+
"grad_norm": 3.7313075065612793,
|
8372 |
+
"learning_rate": 1.9265519360786727e-05,
|
8373 |
+
"loss": 1.4582,
|
8374 |
+
"step": 2390
|
8375 |
+
},
|
8376 |
+
{
|
8377 |
+
"epoch": 0.18377381684081132,
|
8378 |
+
"grad_norm": 3.824453115463257,
|
8379 |
+
"learning_rate": 1.9264904732636757e-05,
|
8380 |
+
"loss": 1.4394,
|
8381 |
+
"step": 2392
|
8382 |
+
},
|
8383 |
+
{
|
8384 |
+
"epoch": 0.18392747387830363,
|
8385 |
+
"grad_norm": 4.368152141571045,
|
8386 |
+
"learning_rate": 1.9264290104486786e-05,
|
8387 |
+
"loss": 1.5397,
|
8388 |
+
"step": 2394
|
8389 |
+
},
|
8390 |
+
{
|
8391 |
+
"epoch": 0.18408113091579595,
|
8392 |
+
"grad_norm": 3.7525463104248047,
|
8393 |
+
"learning_rate": 1.926367547633682e-05,
|
8394 |
+
"loss": 1.6875,
|
8395 |
+
"step": 2396
|
8396 |
+
},
|
8397 |
+
{
|
8398 |
+
"epoch": 0.18423478795328826,
|
8399 |
+
"grad_norm": 4.229045391082764,
|
8400 |
+
"learning_rate": 1.9263060848186846e-05,
|
8401 |
+
"loss": 1.6248,
|
8402 |
+
"step": 2398
|
8403 |
+
},
|
8404 |
+
{
|
8405 |
+
"epoch": 0.18438844499078058,
|
8406 |
+
"grad_norm": 3.9596312046051025,
|
8407 |
+
"learning_rate": 1.926244622003688e-05,
|
8408 |
+
"loss": 1.5278,
|
8409 |
+
"step": 2400
|
8410 |
}
|
8411 |
],
|
8412 |
"logging_steps": 2,
|
|
|
8426 |
"attributes": {}
|
8427 |
}
|
8428 |
},
|
8429 |
+
"total_flos": 1.5320091651263693e+19,
|
8430 |
"train_batch_size": 8,
|
8431 |
"trial_name": null,
|
8432 |
"trial_params": null
|