Training in progress, step 900, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d7637002d09c40b1ff0dde4fed8f25c0c10ecf2fd2644537a51a991a1f84921f
 size 335604696
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3b4567780ea1fb9785e7a47fafcbc19f29af05375e9930a73b1c3889ee832a61
 size 671364602
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b6025f10f8dc28cf2aa71e4cc9e5baccca384a12c168c6830134648dd0fa9a65
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d9398917891da1c3e1a39a59a3fc310e3fedd16aa4e461277171952f5d6ea135
 size 1064
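The four files above are Git LFS pointers: the repository itself stores only the `version` / `oid sha256` / `size` triplet, and the new `oid` values identify the blobs uploaded with this step-900 checkpoint. As a minimal sketch (assuming the blobs have been pulled to a local `last-checkpoint/` directory; the path is illustrative), a downloaded file can be checked against its pointer's oid:

```python
import hashlib

def lfs_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    """Return the sha256 hex digest of a file, i.e. the value an LFS pointer stores as its oid."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Example: compare against the new pointer for the optimizer state.
digest = lfs_sha256("last-checkpoint/optimizer.pt")
print(digest == "3b4567780ea1fb9785e7a47fafcbc19f29af05375e9930a73b1c3889ee832a61")
```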
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch":
   "eval_steps": 100,
-  "global_step":
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -4971,6 +4971,1422 @@
       "eval_samples_per_second": 4.832,
       "eval_steps_per_second": 4.832,
       "step": 700
     }
   ],
   "logging_steps": 1,
@@ -4990,7 +6406,7 @@
       "attributes": {}
     }
   },
-  "total_flos": 1.
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null

 {
   "best_metric": null,
   "best_model_checkpoint": null,
+  "epoch": 4.536862003780718,
   "eval_steps": 100,
+  "global_step": 900,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,

       "eval_samples_per_second": 4.832,
       "eval_steps_per_second": 4.832,
       "step": 700
+    },
+    { "epoch": 3.5337114051669816, "grad_norm": 1.904463529586792, "learning_rate": 4.252570523079852e-05, "loss": 0.4621, "step": 701 },
+    { "epoch": 3.5387523629489603, "grad_norm": 2.9819843769073486, "learning_rate": 4.2263667111118074e-05, "loss": 0.4595, "step": 702 },
+    { "epoch": 3.5437933207309387, "grad_norm": 2.181044578552246, "learning_rate": 4.2002222321588767e-05, "loss": 0.4761, "step": 703 },
+    { "epoch": 3.5488342785129174, "grad_norm": 3.2070817947387695, "learning_rate": 4.174137354896039e-05, "loss": 0.5459, "step": 704 },
+    { "epoch": 3.553875236294896, "grad_norm": 2.638803720474243, "learning_rate": 4.148112347385762e-05, "loss": 0.4924, "step": 705 },
+    { "epoch": 3.5589161940768745, "grad_norm": 2.7156784534454346, "learning_rate": 4.12214747707527e-05, "loss": 0.7368, "step": 706 },
+    { "epoch": 3.5639571518588533, "grad_norm": 3.3311760425567627, "learning_rate": 4.096243010793774e-05, "loss": 0.3963, "step": 707 },
+    { "epoch": 3.5689981096408316, "grad_norm": 2.9713871479034424, "learning_rate": 4.0703992147497425e-05, "loss": 0.7188, "step": 708 },
+    { "epoch": 3.5740390674228104, "grad_norm": 1.7167043685913086, "learning_rate": 4.0446163545281733e-05, "loss": 0.3642, "step": 709 },
+    { "epoch": 3.5790800252047887, "grad_norm": 2.598292827606201, "learning_rate": 4.0188946950878404e-05, "loss": 0.5682, "step": 710 },
+    { "epoch": 3.5841209829867675, "grad_norm": 2.3792648315429688, "learning_rate": 3.9932345007585966e-05, "loss": 0.5006, "step": 711 },
+    { "epoch": 3.5891619407687463, "grad_norm": 2.329403877258301, "learning_rate": 3.9676360352386356e-05, "loss": 0.5454, "step": 712 },
+    { "epoch": 3.5942028985507246, "grad_norm": 2.2515058517456055, "learning_rate": 3.942099561591802e-05, "loss": 0.593, "step": 713 },
+    { "epoch": 3.599243856332703, "grad_norm": 2.5386974811553955, "learning_rate": 3.9166253422448686e-05, "loss": 0.4879, "step": 714 },
+    { "epoch": 3.6042848141146817, "grad_norm": 2.0065455436706543, "learning_rate": 3.8912136389848576e-05, "loss": 0.3789, "step": 715 },
+    { "epoch": 3.6093257718966605, "grad_norm": 3.586135149002075, "learning_rate": 3.8658647129563364e-05, "loss": 0.6077, "step": 716 },
+    { "epoch": 3.614366729678639, "grad_norm": 2.020395278930664, "learning_rate": 3.840578824658735e-05, "loss": 0.4066, "step": 717 },
+    { "epoch": 3.6194076874606176, "grad_norm": 3.362459182739258, "learning_rate": 3.8153562339436855e-05, "loss": 0.7878, "step": 718 },
+    { "epoch": 3.6244486452425964, "grad_norm": 2.628788948059082, "learning_rate": 3.7901972000123275e-05, "loss": 0.4706, "step": 719 },
+    { "epoch": 3.6294896030245747, "grad_norm": 2.5555994510650635, "learning_rate": 3.7651019814126654e-05, "loss": 0.5187, "step": 720 },
+    { "epoch": 3.634530560806553, "grad_norm": 2.163494825363159, "learning_rate": 3.740070836036893e-05, "loss": 0.4981, "step": 721 },
+    { "epoch": 3.639571518588532, "grad_norm": 2.215010643005371, "learning_rate": 3.7151040211187635e-05, "loss": 0.4935, "step": 722 },
+    { "epoch": 3.6446124763705106, "grad_norm": 1.8970359563827515, "learning_rate": 3.690201793230924e-05, "loss": 0.4363, "step": 723 },
+    { "epoch": 3.649653434152489, "grad_norm": 2.3382508754730225, "learning_rate": 3.665364408282305e-05, "loss": 0.5099, "step": 724 },
+    { "epoch": 3.6546943919344677, "grad_norm": 3.072990894317627, "learning_rate": 3.6405921215154494e-05, "loss": 0.426, "step": 725 },
+    { "epoch": 3.659735349716446, "grad_norm": 2.7968897819519043, "learning_rate": 3.615885187503946e-05, "loss": 0.4038, "step": 726 },
+    { "epoch": 3.6647763074984248, "grad_norm": 2.408327102661133, "learning_rate": 3.591243860149759e-05, "loss": 0.5611, "step": 727 },
+    { "epoch": 3.669817265280403, "grad_norm": 2.1298022270202637, "learning_rate": 3.566668392680662e-05, "loss": 0.5358, "step": 728 },
+    { "epoch": 3.674858223062382, "grad_norm": 2.271458864212036, "learning_rate": 3.5421590376475976e-05, "loss": 0.5573, "step": 729 },
+    { "epoch": 3.6798991808443606, "grad_norm": 2.6000235080718994, "learning_rate": 3.517716046922118e-05, "loss": 0.604, "step": 730 },
+    { "epoch": 3.684940138626339, "grad_norm": 4.398734092712402, "learning_rate": 3.493339671693765e-05, "loss": 0.8516, "step": 731 },
+    { "epoch": 3.6899810964083177, "grad_norm": 2.618332862854004, "learning_rate": 3.469030162467513e-05, "loss": 0.5842, "step": 732 },
+    { "epoch": 3.695022054190296, "grad_norm": 2.398754835128784, "learning_rate": 3.4447877690611864e-05, "loss": 0.5948, "step": 733 },
+    { "epoch": 3.700063011972275, "grad_norm": 2.336280584335327, "learning_rate": 3.4206127406028745e-05, "loss": 0.5372, "step": 734 },
+    { "epoch": 3.705103969754253, "grad_norm": 2.154177665710449, "learning_rate": 3.3965053255284084e-05, "loss": 0.2975, "step": 735 },
+    { "epoch": 3.710144927536232, "grad_norm": 3.1699931621551514, "learning_rate": 3.372465771578771e-05, "loss": 0.6722, "step": 736 },
+    { "epoch": 3.7151858853182107, "grad_norm": 1.8925341367721558, "learning_rate": 3.348494325797579e-05, "loss": 0.3454, "step": 737 },
+    { "epoch": 3.720226843100189, "grad_norm": 2.5367252826690674, "learning_rate": 3.32459123452852e-05, "loss": 0.6358, "step": 738 },
+    { "epoch": 3.7252678008821674, "grad_norm": 2.3363893032073975, "learning_rate": 3.30075674341285e-05, "loss": 0.5481, "step": 739 },
+    { "epoch": 3.730308758664146, "grad_norm": 3.2516896724700928, "learning_rate": 3.276991097386831e-05, "loss": 0.6553, "step": 740 },
+    { "epoch": 3.735349716446125, "grad_norm": 2.5481348037719727, "learning_rate": 3.253294540679257e-05, "loss": 0.5474, "step": 741 },
+    { "epoch": 3.7403906742281032, "grad_norm": 2.2866156101226807, "learning_rate": 3.229667316808908e-05, "loss": 0.5735, "step": 742 },
+    { "epoch": 3.745431632010082, "grad_norm": 2.344595193862915, "learning_rate": 3.20610966858206e-05, "loss": 0.515, "step": 743 },
+    { "epoch": 3.7504725897920603, "grad_norm": 3.5799601078033447, "learning_rate": 3.1826218380900064e-05, "loss": 0.5213, "step": 744 },
+    { "epoch": 3.755513547574039, "grad_norm": 2.1822752952575684, "learning_rate": 3.159204066706539e-05, "loss": 0.435, "step": 745 },
+    { "epoch": 3.7605545053560174, "grad_norm": 2.42189621925354, "learning_rate": 3.135856595085498e-05, "loss": 0.6087, "step": 746 },
+    { "epoch": 3.765595463137996, "grad_norm": 11.452091217041016, "learning_rate": 3.1125796631582706e-05, "loss": 0.4947, "step": 747 },
+    { "epoch": 3.770636420919975, "grad_norm": 2.2891154289245605, "learning_rate": 3.089373510131354e-05, "loss": 0.4724, "step": 748 },
+    { "epoch": 3.7756773787019533, "grad_norm": 2.7829198837280273, "learning_rate": 3.066238374483869e-05, "loss": 0.6174, "step": 749 },
+    { "epoch": 3.780718336483932, "grad_norm": 2.2421112060546875, "learning_rate": 3.0431744939651364e-05, "loss": 0.6198, "step": 750 },
+    { "epoch": 3.7857592942659104, "grad_norm": 2.286048173904419, "learning_rate": 3.0201821055922098e-05, "loss": 0.4045, "step": 751 },
+    { "epoch": 3.790800252047889, "grad_norm": 2.8162734508514404, "learning_rate": 2.9972614456474536e-05, "loss": 0.4992, "step": 752 },
+    { "epoch": 3.7958412098298675, "grad_norm": 2.0561976432800293, "learning_rate": 2.974412749676109e-05, "loss": 0.4656, "step": 753 },
+    { "epoch": 3.8008821676118463, "grad_norm": 2.390935182571411, "learning_rate": 2.9516362524838846e-05, "loss": 0.4937, "step": 754 },
+    { "epoch": 3.805923125393825, "grad_norm": 2.4606690406799316, "learning_rate": 2.9289321881345254e-05, "loss": 0.5145, "step": 755 },
+    { "epoch": 3.8109640831758034, "grad_norm": 2.025035858154297, "learning_rate": 2.9063007899474216e-05, "loss": 0.4321, "step": 756 },
+    { "epoch": 3.8160050409577817, "grad_norm": 2.5672638416290283, "learning_rate": 2.8837422904952094e-05, "loss": 0.5749, "step": 757 },
+    { "epoch": 3.8210459987397605, "grad_norm": 2.1888957023620605, "learning_rate": 2.8612569216013672e-05, "loss": 0.4122, "step": 758 },
+    { "epoch": 3.8260869565217392, "grad_norm": 1.9208203554153442, "learning_rate": 2.838844914337857e-05, "loss": 0.3589, "step": 759 },
+    { "epoch": 3.8311279143037176, "grad_norm": 2.0264217853546143, "learning_rate": 2.8165064990227252e-05, "loss": 0.4746, "step": 760 },
+    { "epoch": 3.8361688720856963, "grad_norm": 2.400031566619873, "learning_rate": 2.7942419052177525e-05, "loss": 0.6351, "step": 761 },
+    { "epoch": 3.841209829867675, "grad_norm": 4.054795265197754, "learning_rate": 2.7720513617260856e-05, "loss": 0.5294, "step": 762 },
+    { "epoch": 3.8462507876496534, "grad_norm": 2.3282177448272705, "learning_rate": 2.7499350965898974e-05, "loss": 0.4307, "step": 763 },
+    { "epoch": 3.8512917454316318, "grad_norm": 2.6556639671325684, "learning_rate": 2.7278933370880265e-05, "loss": 0.596, "step": 764 },
+    { "epoch": 3.8563327032136105, "grad_norm": 2.5677340030670166, "learning_rate": 2.7059263097336597e-05, "loss": 0.6471, "step": 765 },
+    { "epoch": 3.8613736609955893, "grad_norm": 2.354823350906372, "learning_rate": 2.6840342402719866e-05, "loss": 0.4488, "step": 766 },
+    { "epoch": 3.8664146187775676, "grad_norm": 1.925303339958191, "learning_rate": 2.662217353677896e-05, "loss": 0.5354, "step": 767 },
+    { "epoch": 3.8714555765595464, "grad_norm": 2.55922532081604, "learning_rate": 2.6404758741536505e-05, "loss": 0.6549, "step": 768 },
+    { "epoch": 3.8764965343415247, "grad_norm": 2.768028736114502, "learning_rate": 2.6188100251265945e-05, "loss": 0.5541, "step": 769 },
+    { "epoch": 3.8815374921235035, "grad_norm": 2.2292630672454834, "learning_rate": 2.5972200292468464e-05, "loss": 0.5644, "step": 770 },
+    { "epoch": 3.886578449905482, "grad_norm": 2.137153387069702, "learning_rate": 2.5757061083850154e-05, "loss": 0.4963, "step": 771 },
+    { "epoch": 3.8916194076874606, "grad_norm": 5.6507649421691895, "learning_rate": 2.5542684836299313e-05, "loss": 0.585, "step": 772 },
+    { "epoch": 3.8966603654694394, "grad_norm": 2.6057021617889404, "learning_rate": 2.5329073752863508e-05, "loss": 0.6298, "step": 773 },
+    { "epoch": 3.9017013232514177, "grad_norm": 3.6550450325012207, "learning_rate": 2.5116230028727184e-05, "loss": 0.6987, "step": 774 },
+    { "epoch": 3.9067422810333965, "grad_norm": 2.3518424034118652, "learning_rate": 2.4904155851188872e-05, "loss": 0.5109, "step": 775 },
+    { "epoch": 3.911783238815375, "grad_norm": 2.298001289367676, "learning_rate": 2.4692853399638917e-05, "loss": 0.5297, "step": 776 },
+    { "epoch": 3.9168241965973536, "grad_norm": 3.2843151092529297, "learning_rate": 2.4482324845536887e-05, "loss": 0.4722, "step": 777 },
+    { "epoch": 3.921865154379332, "grad_norm": 2.896202802658081, "learning_rate": 2.427257235238949e-05, "loss": 0.6685, "step": 778 },
+    { "epoch": 3.9269061121613107, "grad_norm": 2.6562740802764893, "learning_rate": 2.406359807572802e-05, "loss": 0.4104, "step": 779 },
+    { "epoch": 3.9319470699432895, "grad_norm": 2.0042037963867188, "learning_rate": 2.3855404163086558e-05, "loss": 0.4288, "step": 780 },
+    { "epoch": 3.936988027725268, "grad_norm": 2.8666815757751465, "learning_rate": 2.3647992753979696e-05, "loss": 0.5282, "step": 781 },
+    { "epoch": 3.942028985507246, "grad_norm": 2.1556613445281982, "learning_rate": 2.3441365979880524e-05, "loss": 0.4811, "step": 782 },
+    { "epoch": 3.947069943289225, "grad_norm": 2.366997718811035, "learning_rate": 2.323552596419889e-05, "loss": 0.5873, "step": 783 },
+    { "epoch": 3.9521109010712037, "grad_norm": 1.9457205533981323, "learning_rate": 2.3030474822259397e-05, "loss": 0.3889, "step": 784 },
+    { "epoch": 3.957151858853182, "grad_norm": 2.5088860988616943, "learning_rate": 2.282621466127982e-05, "loss": 0.5879, "step": 785 },
+    { "epoch": 3.9621928166351608, "grad_norm": 2.1277389526367188, "learning_rate": 2.2622747580349314e-05, "loss": 0.5233, "step": 786 },
+    { "epoch": 3.967233774417139, "grad_norm": 1.8649202585220337, "learning_rate": 2.2420075670407015e-05, "loss": 0.4135, "step": 787 },
+    { "epoch": 3.972274732199118, "grad_norm": 1.8878589868545532, "learning_rate": 2.2218201014220263e-05, "loss": 0.3911, "step": 788 },
+    { "epoch": 3.977315689981096, "grad_norm": 3.219775915145874, "learning_rate": 2.2017125686363592e-05, "loss": 0.6087, "step": 789 },
+    { "epoch": 3.982356647763075, "grad_norm": 2.3779828548431396, "learning_rate": 2.181685175319702e-05, "loss": 0.4802, "step": 790 },
+    { "epoch": 3.9873976055450537, "grad_norm": 2.0869672298431396, "learning_rate": 2.161738127284517e-05, "loss": 0.4566, "step": 791 },
+    { "epoch": 3.992438563327032, "grad_norm": 2.7853212356567383, "learning_rate": 2.141871629517577e-05, "loss": 0.5933, "step": 792 },
+    { "epoch": 3.997479521109011, "grad_norm": 2.1378300189971924, "learning_rate": 2.1220858861778904e-05, "loss": 0.4925, "step": 793 },
+    { "epoch": 4.00252047889099, "grad_norm": 4.350517272949219, "learning_rate": 2.1023811005945772e-05, "loss": 0.8567, "step": 794 },
+    { "epoch": 4.007561436672968, "grad_norm": 1.473480463027954, "learning_rate": 2.0827574752648038e-05, "loss": 0.2467, "step": 795 },
+    { "epoch": 4.012602394454946, "grad_norm": 1.4368691444396973, "learning_rate": 2.063215211851678e-05, "loss": 0.231, "step": 796 },
+    { "epoch": 4.017643352236925, "grad_norm": 1.1591088771820068, "learning_rate": 2.043754511182191e-05, "loss": 0.1757, "step": 797 },
+    { "epoch": 4.022684310018904, "grad_norm": 1.5065808296203613, "learning_rate": 2.0243755732451563e-05, "loss": 0.3039, "step": 798 },
+    { "epoch": 4.027725267800882, "grad_norm": 1.6391133069992065, "learning_rate": 2.0050785971891404e-05, "loss": 0.3098, "step": 799 },
+    { "epoch": 4.0327662255828605, "grad_norm": 1.5096096992492676, "learning_rate": 1.985863781320435e-05, "loss": 0.2365, "step": 800 },
+    { "epoch": 4.0327662255828605, "eval_loss": 3.012767791748047, "eval_runtime": 17.3958, "eval_samples_per_second": 4.829, "eval_steps_per_second": 4.829, "step": 800 },
+    { "epoch": 4.03780718336484, "grad_norm": 1.5071699619293213, "learning_rate": 1.9667313231009953e-05, "loss": 0.2235, "step": 801 },
+    { "epoch": 4.042848141146818, "grad_norm": 1.2665601968765259, "learning_rate": 1.947681419146439e-05, "loss": 0.2058, "step": 802 },
+    { "epoch": 4.047889098928796, "grad_norm": 1.5283819437026978, "learning_rate": 1.9287142652239966e-05, "loss": 0.2866, "step": 803 },
+    { "epoch": 4.052930056710775, "grad_norm": 1.5947951078414917, "learning_rate": 1.9098300562505266e-05, "loss": 0.1963, "step": 804 },
+    { "epoch": 4.057971014492754, "grad_norm": 1.650177240371704, "learning_rate": 1.891028986290492e-05, "loss": 0.2659, "step": 805 },
+    { "epoch": 4.063011972274732, "grad_norm": 1.5249379873275757, "learning_rate": 1.872311248553974e-05, "loss": 0.2298, "step": 806 },
+    { "epoch": 4.0680529300567105, "grad_norm": 1.8285279273986816, "learning_rate": 1.853677035394692e-05, "loss": 0.2954, "step": 807 },
+    { "epoch": 4.07309388783869, "grad_norm": 2.1409661769866943, "learning_rate": 1.835126538308013e-05, "loss": 0.2479, "step": 808 },
+    { "epoch": 4.078134845620668, "grad_norm": 1.7945770025253296, "learning_rate": 1.8166599479290012e-05, "loss": 0.212, "step": 809 },
+    { "epoch": 4.083175803402646, "grad_norm": 1.5002671480178833, "learning_rate": 1.7982774540304403e-05, "loss": 0.1913, "step": 810 },
+    { "epoch": 4.088216761184625, "grad_norm": 1.233276128768921, "learning_rate": 1.7799792455209018e-05, "loss": 0.1265, "step": 811 },
+    { "epoch": 4.093257718966604, "grad_norm": 1.5653570890426636, "learning_rate": 1.7617655104427832e-05, "loss": 0.1916, "step": 812 },
+    { "epoch": 4.098298676748582, "grad_norm": 1.9948022365570068, "learning_rate": 1.7436364359703992e-05, "loss": 0.2164, "step": 813 },
+    { "epoch": 4.103339634530561, "grad_norm": 1.7396409511566162, "learning_rate": 1.725592208408037e-05, "loss": 0.179, "step": 814 },
+    { "epoch": 4.108380592312539, "grad_norm": 1.892167091369629, "learning_rate": 1.7076330131880526e-05, "loss": 0.1692, "step": 815 },
+    { "epoch": 4.113421550094518, "grad_norm": 2.3079912662506104, "learning_rate": 1.689759034868961e-05, "loss": 0.2436, "step": 816 },
+    { "epoch": 4.1184625078764965, "grad_norm": 1.813413381576538, "learning_rate": 1.6719704571335503e-05, "loss": 0.2619, "step": 817 },
+    { "epoch": 4.123503465658475, "grad_norm": 1.4480020999908447, "learning_rate": 1.6542674627869737e-05, "loss": 0.2073, "step": 818 },
+    { "epoch": 4.128544423440454, "grad_norm": 2.4162139892578125, "learning_rate": 1.636650233754895e-05, "loss": 0.2831, "step": 819 },
+    { "epoch": 4.133585381222432, "grad_norm": 2.053483009338379, "learning_rate": 1.619118951081594e-05, "loss": 0.2556, "step": 820 },
+    { "epoch": 4.138626339004411, "grad_norm": 1.7280874252319336, "learning_rate": 1.601673794928127e-05, "loss": 0.1402, "step": 821 },
+    { "epoch": 4.143667296786389, "grad_norm": 1.6793662309646606, "learning_rate": 1.5843149445704686e-05, "loss": 0.1871, "step": 822 },
+    { "epoch": 4.148708254568368, "grad_norm": 2.4378581047058105, "learning_rate": 1.5670425783976582e-05, "loss": 0.2449, "step": 823 },
+    { "epoch": 4.1537492123503466, "grad_norm": 2.1296300888061523, "learning_rate": 1.5498568739099906e-05, "loss": 0.3243, "step": 824 },
+    { "epoch": 4.158790170132325, "grad_norm": 2.998354196548462, "learning_rate": 1.5327580077171587e-05, "loss": 0.272, "step": 825 },
+    { "epoch": 4.163831127914304, "grad_norm": 2.1568050384521484, "learning_rate": 1.5157461555364772e-05, "loss": 0.2869, "step": 826 },
+    { "epoch": 4.168872085696282, "grad_norm": 2.672724723815918, "learning_rate": 1.4988214921910416e-05, "loss": 0.2842, "step": 827 },
+    { "epoch": 4.173913043478261, "grad_norm": 3.133556365966797, "learning_rate": 1.481984191607959e-05, "loss": 0.4331, "step": 828 },
+    { "epoch": 4.178954001260239, "grad_norm": 1.9987956285476685, "learning_rate": 1.4652344268165407e-05, "loss": 0.2097, "step": 829 },
+    { "epoch": 4.183994959042218, "grad_norm": 2.313779830932617, "learning_rate": 1.4485723699465392e-05, "loss": 0.2143, "step": 830 },
+    { "epoch": 4.189035916824197, "grad_norm": 1.4090917110443115, "learning_rate": 1.4319981922263637e-05, "loss": 0.1067, "step": 831 },
+    { "epoch": 4.194076874606175, "grad_norm": 1.9386868476867676, "learning_rate": 1.415512063981339e-05, "loss": 0.2219, "step": 832 },
+    { "epoch": 4.199117832388154, "grad_norm": 1.5428823232650757, "learning_rate": 1.3991141546319386e-05, "loss": 0.1661, "step": 833 },
+    { "epoch": 4.2041587901701325, "grad_norm": 2.226743459701538, "learning_rate": 1.3828046326920497e-05, "loss": 0.1996, "step": 834 },
+    { "epoch": 4.209199747952111, "grad_norm": 1.9956004619598389, "learning_rate": 1.3665836657672493e-05, "loss": 0.2192, "step": 835 },
+    { "epoch": 4.214240705734089, "grad_norm": 2.2574939727783203, "learning_rate": 1.350451420553065e-05, "loss": 0.2617, "step": 836 },
+    { "epoch": 4.219281663516068, "grad_norm": 1.7005239725112915, "learning_rate": 1.33440806283328e-05, "loss": 0.1683, "step": 837 },
+    { "epoch": 4.224322621298047, "grad_norm": 2.081350088119507, "learning_rate": 1.3184537574782152e-05, "loss": 0.2354, "step": 838 },
+    { "epoch": 4.229363579080025, "grad_norm": 2.5405027866363525, "learning_rate": 1.3025886684430467e-05, "loss": 0.4283, "step": 839 },
+    { "epoch": 4.234404536862003, "grad_norm": 1.4173089265823364, "learning_rate": 1.286812958766106e-05, "loss": 0.1457, "step": 840 },
+    { "epoch": 4.239445494643983, "grad_norm": 1.8977196216583252, "learning_rate": 1.2711267905672231e-05, "loss": 0.3162, "step": 841 },
+    { "epoch": 4.244486452425961, "grad_norm": 1.7146344184875488, "learning_rate": 1.2555303250460437e-05, "loss": 0.238, "step": 842 },
+    { "epoch": 4.249527410207939, "grad_norm": 1.6202746629714966, "learning_rate": 1.2400237224803835e-05, "loss": 0.1741, "step": 843 },
+    { "epoch": 4.254568367989918, "grad_norm": 2.0077598094940186, "learning_rate": 1.224607142224572e-05, "loss": 0.229, "step": 844 },
+    { "epoch": 4.259609325771897, "grad_norm": 2.2346436977386475, "learning_rate": 1.2092807427078279e-05, "loss": 0.2147, "step": 845 },
+    { "epoch": 4.264650283553875, "grad_norm": 2.1456286907196045, "learning_rate": 1.19404468143262e-05, "loss": 0.296, "step": 846 },
+    { "epoch": 4.269691241335853, "grad_norm": 3.3691720962524414, "learning_rate": 1.1788991149730499e-05, "loss": 0.2103, "step": 847 },
+    { "epoch": 4.274732199117833, "grad_norm": 2.239535331726074, "learning_rate": 1.1638441989732473e-05, "loss": 0.2532, "step": 848 },
+    { "epoch": 4.279773156899811, "grad_norm": 1.5389206409454346, "learning_rate": 1.1488800881457651e-05, "loss": 0.1946, "step": 849 },
+    { "epoch": 4.284814114681789, "grad_norm": 1.961316704750061, "learning_rate": 1.134006936269999e-05, "loss": 0.2405, "step": 850 },
+    { "epoch": 4.2898550724637685, "grad_norm": 1.7677733898162842, "learning_rate": 1.1192248961905949e-05, "loss": 0.24, "step": 851 },
+    { "epoch": 4.294896030245747, "grad_norm": 2.160717248916626, "learning_rate": 1.1045341198158831e-05, "loss": 0.2534, "step": 852 },
+    { "epoch": 4.299936988027725, "grad_norm": 1.4461027383804321, "learning_rate": 1.0899347581163221e-05, "loss": 0.1449, "step": 853 },
+    { "epoch": 4.3049779458097035, "grad_norm": 2.0098049640655518, "learning_rate": 1.0754269611229429e-05, "loss": 0.2019, "step": 854 },
+    { "epoch": 4.310018903591683, "grad_norm": 1.4779269695281982, "learning_rate": 1.0610108779258044e-05, "loss": 0.1673, "step": 855 },
+    { "epoch": 4.315059861373661, "grad_norm": 2.103637933731079, "learning_rate": 1.0466866566724698e-05, "loss": 0.2769, "step": 856 },
+    { "epoch": 4.320100819155639, "grad_norm": 1.9207972288131714, "learning_rate": 1.0324544445664697e-05, "loss": 0.2315, "step": 857 },
+    { "epoch": 4.325141776937619, "grad_norm": 2.1382949352264404, "learning_rate": 1.0183143878658096e-05, "loss": 0.3158, "step": 858 },
+    { "epoch": 4.330182734719597, "grad_norm": 2.0968329906463623, "learning_rate": 1.0042666318814465e-05, "loss": 0.2156, "step": 859 },
+    { "epoch": 4.335223692501575, "grad_norm": 1.8089754581451416, "learning_rate": 9.903113209758096e-06, "loss": 0.1696, "step": 860 },
+    { "epoch": 4.340264650283554, "grad_norm": 1.6891980171203613, "learning_rate": 9.764485985613092e-06, "loss": 0.1831, "step": 861 },
+    { "epoch": 4.345305608065533, "grad_norm": 1.835351586341858, "learning_rate": 9.626786070988658e-06, "loss": 0.1941, "step": 862 },
+    { "epoch": 4.350346565847511, "grad_norm": 2.2840046882629395, "learning_rate": 9.490014880964503e-06, "loss": 0.2948, "step": 863 },
+    { "epoch": 4.355387523629489, "grad_norm": 2.5431714057922363, "learning_rate": 9.354173821076184e-06, "loss": 0.3347, "step": 864 },
+    { "epoch": 4.360428481411468, "grad_norm": 3.4649643898010254, "learning_rate": 9.219264287300799e-06, "loss": 0.2935, "step": 865 },
+    { "epoch": 4.365469439193447, "grad_norm": 1.9627771377563477, "learning_rate": 9.085287666042508e-06, "loss": 0.2774, "step": 866 },
+    { "epoch": 4.370510396975425, "grad_norm": 2.2434778213500977, "learning_rate": 8.952245334118414e-06, "loss": 0.2343, "step": 867 },
+    { "epoch": 4.375551354757404, "grad_norm": 1.981297492980957, "learning_rate": 8.820138658744304e-06, "loss": 0.3132, "step": 868 },
+    { "epoch": 4.380592312539383, "grad_norm": 1.711285948753357, "learning_rate": 8.688968997520685e-06, "loss": 0.2461, "step": 869 },
+    { "epoch": 4.385633270321361, "grad_norm": 1.5340454578399658, "learning_rate": 8.558737698418761e-06, "loss": 0.1259, "step": 870 },
+    { "epoch": 4.3906742281033395, "grad_norm": 1.8006521463394165, "learning_rate": 8.429446099766614e-06, "loss": 0.175, "step": 871 },
+    { "epoch": 4.395715185885318, "grad_norm": 1.98384690284729, "learning_rate": 8.301095530235492e-06, "loss": 0.3221, "step": 872 },
+    { "epoch": 4.400756143667297, "grad_norm": 1.8499350547790527, "learning_rate": 8.173687308826062e-06, "loss": 0.1842, "step": 873 },
+    { "epoch": 4.405797101449275, "grad_norm": 1.3802666664123535, "learning_rate": 8.047222744854943e-06, "loss": 0.1984, "step": 874 },
+    { "epoch": 4.410838059231254, "grad_norm": 1.8781450986862183, "learning_rate": 7.921703137941173e-06, "loss": 0.2289, "step": 875 },
+    { "epoch": 4.415879017013232, "grad_norm": 1.9210363626480103, "learning_rate": 7.797129777992952e-06, "loss": 0.1701, "step": 876 },
+    { "epoch": 4.420919974795211, "grad_norm": 1.662297010421753, "learning_rate": 7.67350394519426e-06, "loss": 0.1515, "step": 877 },
+    { "epoch": 4.42596093257719, "grad_norm": 1.7319724559783936, "learning_rate": 7.550826909991859e-06, "loss": 0.1837, "step": 878 },
+    { "epoch": 4.431001890359168, "grad_norm": 2.0602517127990723, "learning_rate": 7.429099933082029e-06, "loss": 0.2369, "step": 879 },
+    { "epoch": 4.436042848141147, "grad_norm": 2.26853609085083, "learning_rate": 7.308324265397836e-06, "loss": 0.2115, "step": 880 },
+    { "epoch": 4.4410838059231255, "grad_norm": 1.533276915550232, "learning_rate": 7.1885011480961164e-06, "loss": 0.1524, "step": 881 },
+    { "epoch": 4.446124763705104, "grad_norm": 2.3979432582855225, "learning_rate": 7.069631812544808e-06, "loss": 0.2283, "step": 882 },
+    { "epoch": 4.451165721487083, "grad_norm": 1.7930938005447388, "learning_rate": 6.951717480310216e-06, "loss": 0.1852, "step": 883 },
+    { "epoch": 4.456206679269061, "grad_norm": 1.708565592765808, "learning_rate": 6.834759363144594e-06, "loss": 0.1727, "step": 884 },
+    { "epoch": 4.46124763705104, "grad_norm": 1.9065909385681152, "learning_rate": 6.718758662973523e-06, "loss": 0.2448, "step": 885 },
+    { "epoch": 4.466288594833018, "grad_norm": 2.4388699531555176, "learning_rate": 6.603716571883689e-06, "loss": 0.3136, "step": 886 },
+    { "epoch": 4.471329552614997, "grad_norm": 2.000920057296753, "learning_rate": 6.489634272110568e-06, "loss": 0.2552, "step": 887 },
+    { "epoch": 4.4763705103969755, "grad_norm": 1.498339056968689, "learning_rate": 6.37651293602628e-06, "loss": 0.1711, "step": 888 },
+    { "epoch": 4.481411468178954, "grad_norm": 1.8846991062164307, "learning_rate": 6.264353726127615e-06, "loss": 0.1744, "step": 889 },
+    { "epoch": 4.486452425960932, "grad_norm": 2.567516326904297, "learning_rate": 6.153157795023956e-06, "loss": 0.2834, "step": 890 },
+    { "epoch": 4.491493383742911, "grad_norm": 1.2700456380844116, "learning_rate": 6.042926285425576e-06, "loss": 0.118, "step": 891 },
+    { "epoch": 4.49653434152489, "grad_norm": 1.6995525360107422, "learning_rate": 5.933660330131752e-06, "loss": 0.2273, "step": 892 },
+    { "epoch": 4.501575299306868, "grad_norm": 1.4996402263641357, "learning_rate": 5.825361052019251e-06, "loss": 0.1136, "step": 893 },
+    { "epoch": 4.506616257088847, "grad_norm": 2.7703299522399902, "learning_rate": 5.718029564030703e-06, "loss": 0.3086, "step": 894 },
+    { "epoch": 4.511657214870826, "grad_norm": 1.732616901397705, "learning_rate": 5.611666969163243e-06, "loss": 0.2473, "step": 895 },
+    { "epoch": 4.516698172652804, "grad_norm": 2.3436343669891357, "learning_rate": 5.506274360457086e-06, "loss": 0.2513, "step": 896 },
+    { "epoch": 4.521739130434782, "grad_norm": 1.9714229106903076, "learning_rate": 5.401852820984321e-06, "loss": 0.2184, "step": 897 },
+    { "epoch": 4.5267800882167615, "grad_norm": 2.1938507556915283, "learning_rate": 5.2984034238378835e-06, "loss": 0.2632, "step": 898 },
+    { "epoch": 4.53182104599874, "grad_norm": 1.7139536142349243, "learning_rate": 5.195927232120335e-06, "loss": 0.1904, "step": 899 },
+    { "epoch": 4.536862003780718, "grad_norm": 2.74682879447937, "learning_rate": 5.094425298933136e-06, "loss": 0.3664, "step": 900 },
+    {
+      "epoch": 4.536862003780718,
+      "eval_loss": 3.2092416286468506,
+      "eval_runtime": 17.3962,
+      "eval_samples_per_second": 4.829,
+      "eval_steps_per_second": 4.829,
+      "step": 900
     }
   ],
   "logging_steps": 1,

       "attributes": {}
     }
   },
+  "total_flos": 1.678523517370368e+17,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null
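The fields touched by this commit (`epoch`, `global_step`, `total_flos`, and the appended per-step records, which live in the Trainer state's `log_history` list) can be inspected directly from the uploaded `trainer_state.json`. A minimal sketch, assuming the checkpoint has been downloaded to a local `last-checkpoint/` directory (path illustrative):

```python
import json

# Load the Trainer state saved with this checkpoint (step 900).
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# Scalar fields updated by this commit.
print(state["epoch"], state["global_step"], state["total_flos"])

# This commit appends log entries for steps 701-900; show the most recent ones.
for entry in state["log_history"][-3:]:
    print(entry)
```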