Training in progress, step 2200, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:7f4451577bf82ae4c14fb8b5f6d15593c695f63d1bc7c8c377049e28c0b6f430
 size 500770656
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:435f4a73c69232486ea2c5684eb01e7449a2602d9445e4a4dbe0c21719127715
 size 254918356
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c6015ab40414177a8cb3a25519cffb5a624e999127e3ac742f7bf693b450cb8e
 size 14244
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e66e55baeee62db229bddf3da45b85b2a91fe7343a6a75e11aba725017a7a321
 size 1064
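Each of the four files above is stored through Git LFS, so the commit only records a pointer per file: the LFS spec version, the object's SHA-256, and its size in bytes. Below is a minimal sketch for checking a downloaded checkpoint file against such a pointer, assuming the pointer text is copied from the diff above; the helper name and local path are illustrative, not part of this repository.

```python
import hashlib
import os

def matches_lfs_pointer(file_path: str, pointer_text: str) -> bool:
    """Check a local file against a Git LFS pointer (oid sha256 + size)."""
    # Pointer lines look like: "version <url>", "oid sha256:<hash>", "size <bytes>"
    fields = dict(line.split(" ", 1) for line in pointer_text.strip().splitlines())
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    with open(file_path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)

    return digest.hexdigest() == expected_oid and os.path.getsize(file_path) == expected_size

# Pointer contents taken from the scheduler.pt diff above (hypothetical local path).
pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:e66e55baeee62db229bddf3da45b85b2a91fe7343a6a75e11aba725017a7a321
size 1064"""
print(matches_lfs_pointer("last-checkpoint/scheduler.pt", pointer))
```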
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
|
|
1 |
{
|
2 |
"best_metric": 0.635880708694458,
|
3 |
"best_model_checkpoint": "miner_id_24/checkpoint-1800",
|
4 |
-
"epoch": 0.
|
5 |
"eval_steps": 200,
|
6 |
-
"global_step":
|
7 |
"is_hyper_param_search": false,
|
8 |
"is_local_process_zero": true,
|
9 |
"is_world_process_zero": true,
|
@@ -14095,6 +14095,1414 @@
|
|
14095 |
"eval_samples_per_second": 2.508,
|
14096 |
"eval_steps_per_second": 2.508,
|
14097 |
"step": 2000
|
14098 |   }
14099 |   ],
14100 |   "logging_steps": 1,
@@ -14109,7 +15517,7 @@
14109 |   "early_stopping_threshold": 0.0
14110 |   },
14111 |   "attributes": {
14112 | - "early_stopping_patience_counter":
14113 |   }
14114 |   },
14115 |   "TrainerControl": {
@@ -14123,7 +15531,7 @@
14123 |   "attributes": {}
14124 |   }
14125 |   },
14126 | - "total_flos": 3.
14127 |   "train_batch_size": 1,
14128 |   "trial_name": null,
14129 |   "trial_params": null

1 |   {
2 |   "best_metric": 0.635880708694458,
3 |   "best_model_checkpoint": "miner_id_24/checkpoint-1800",
4 | + "epoch": 0.303469204772743,
5 |   "eval_steps": 200,
6 | + "global_step": 2200,
7 |   "is_hyper_param_search": false,
8 |   "is_local_process_zero": true,
9 |   "is_world_process_zero": true,

14095 |   "eval_samples_per_second": 2.508,
14096 |   "eval_steps_per_second": 2.508,
14097 |   "step": 2000
14098 |
+
},
|
14099 |
+
{
|
14100 |
+
"epoch": 0.27601903579557213,
|
14101 |
+
"grad_norm": 0.7976175546646118,
|
14102 |
+
"learning_rate": 0.00019596262479772337,
|
14103 |
+
"loss": 0.8986,
|
14104 |
+
"step": 2001
|
14105 |
+
},
|
14106 |
+
{
|
14107 |
+
"epoch": 0.2761569763431961,
|
14108 |
+
"grad_norm": 0.7986498475074768,
|
14109 |
+
"learning_rate": 0.0001959585548074112,
|
14110 |
+
"loss": 0.6112,
|
14111 |
+
"step": 2002
|
14112 |
+
},
|
14113 |
+
{
|
14114 |
+
"epoch": 0.27629491689082003,
|
14115 |
+
"grad_norm": 0.6366571187973022,
|
14116 |
+
"learning_rate": 0.00019595448280900626,
|
14117 |
+
"loss": 0.4923,
|
14118 |
+
"step": 2003
|
14119 |
+
},
|
14120 |
+
{
|
14121 |
+
"epoch": 0.27643285743844404,
|
14122 |
+
"grad_norm": 0.6254854798316956,
|
14123 |
+
"learning_rate": 0.0001959504088025937,
|
14124 |
+
"loss": 0.3532,
|
14125 |
+
"step": 2004
|
14126 |
+
},
|
14127 |
+
{
|
14128 |
+
"epoch": 0.276570797986068,
|
14129 |
+
"grad_norm": 0.9452973008155823,
|
14130 |
+
"learning_rate": 0.0001959463327882588,
|
14131 |
+
"loss": 1.054,
|
14132 |
+
"step": 2005
|
14133 |
+
},
|
14134 |
+
{
|
14135 |
+
"epoch": 0.276708738533692,
|
14136 |
+
"grad_norm": 0.8644076585769653,
|
14137 |
+
"learning_rate": 0.00019594225476608686,
|
14138 |
+
"loss": 0.6988,
|
14139 |
+
"step": 2006
|
14140 |
+
},
|
14141 |
+
{
|
14142 |
+
"epoch": 0.27684667908131594,
|
14143 |
+
"grad_norm": 0.563485860824585,
|
14144 |
+
"learning_rate": 0.00019593817473616322,
|
14145 |
+
"loss": 0.5089,
|
14146 |
+
"step": 2007
|
14147 |
+
},
|
14148 |
+
{
|
14149 |
+
"epoch": 0.27698461962893994,
|
14150 |
+
"grad_norm": 0.9795621037483215,
|
14151 |
+
"learning_rate": 0.00019593409269857325,
|
14152 |
+
"loss": 0.2905,
|
14153 |
+
"step": 2008
|
14154 |
+
},
|
14155 |
+
{
|
14156 |
+
"epoch": 0.2771225601765639,
|
14157 |
+
"grad_norm": 0.6783512234687805,
|
14158 |
+
"learning_rate": 0.00019593000865340238,
|
14159 |
+
"loss": 0.3544,
|
14160 |
+
"step": 2009
|
14161 |
+
},
|
14162 |
+
{
|
14163 |
+
"epoch": 0.2772605007241879,
|
14164 |
+
"grad_norm": 0.6736263036727905,
|
14165 |
+
"learning_rate": 0.00019592592260073613,
|
14166 |
+
"loss": 0.508,
|
14167 |
+
"step": 2010
|
14168 |
+
},
|
14169 |
+
{
|
14170 |
+
"epoch": 0.27739844127181185,
|
14171 |
+
"grad_norm": 0.8978418111801147,
|
14172 |
+
"learning_rate": 0.00019592183454065988,
|
14173 |
+
"loss": 0.8027,
|
14174 |
+
"step": 2011
|
14175 |
+
},
|
14176 |
+
{
|
14177 |
+
"epoch": 0.2775363818194358,
|
14178 |
+
"grad_norm": 0.877034604549408,
|
14179 |
+
"learning_rate": 0.0001959177444732593,
|
14180 |
+
"loss": 0.7846,
|
14181 |
+
"step": 2012
|
14182 |
+
},
|
14183 |
+
{
|
14184 |
+
"epoch": 0.2776743223670598,
|
14185 |
+
"grad_norm": 0.6984195113182068,
|
14186 |
+
"learning_rate": 0.00019591365239861994,
|
14187 |
+
"loss": 0.559,
|
14188 |
+
"step": 2013
|
14189 |
+
},
|
14190 |
+
{
|
14191 |
+
"epoch": 0.27781226291468375,
|
14192 |
+
"grad_norm": 1.1937421560287476,
|
14193 |
+
"learning_rate": 0.00019590955831682742,
|
14194 |
+
"loss": 0.7328,
|
14195 |
+
"step": 2014
|
14196 |
+
},
|
14197 |
+
{
|
14198 |
+
"epoch": 0.27795020346230775,
|
14199 |
+
"grad_norm": 0.7599695920944214,
|
14200 |
+
"learning_rate": 0.00019590546222796742,
|
14201 |
+
"loss": 0.392,
|
14202 |
+
"step": 2015
|
14203 |
+
},
|
14204 |
+
{
|
14205 |
+
"epoch": 0.2780881440099317,
|
14206 |
+
"grad_norm": 0.8356521129608154,
|
14207 |
+
"learning_rate": 0.00019590136413212566,
|
14208 |
+
"loss": 0.3812,
|
14209 |
+
"step": 2016
|
14210 |
+
},
|
14211 |
+
{
|
14212 |
+
"epoch": 0.2782260845575557,
|
14213 |
+
"grad_norm": 0.6217128038406372,
|
14214 |
+
"learning_rate": 0.00019589726402938792,
|
14215 |
+
"loss": 0.5064,
|
14216 |
+
"step": 2017
|
14217 |
+
},
|
14218 |
+
{
|
14219 |
+
"epoch": 0.27836402510517966,
|
14220 |
+
"grad_norm": 0.9336037039756775,
|
14221 |
+
"learning_rate": 0.00019589316191984,
|
14222 |
+
"loss": 0.7596,
|
14223 |
+
"step": 2018
|
14224 |
+
},
|
14225 |
+
{
|
14226 |
+
"epoch": 0.27850196565280366,
|
14227 |
+
"grad_norm": 0.6983953714370728,
|
14228 |
+
"learning_rate": 0.0001958890578035677,
|
14229 |
+
"loss": 0.7579,
|
14230 |
+
"step": 2019
|
14231 |
+
},
|
14232 |
+
{
|
14233 |
+
"epoch": 0.2786399062004276,
|
14234 |
+
"grad_norm": 0.6743526458740234,
|
14235 |
+
"learning_rate": 0.00019588495168065692,
|
14236 |
+
"loss": 0.4536,
|
14237 |
+
"step": 2020
|
14238 |
+
},
|
14239 |
+
{
|
14240 |
+
"epoch": 0.2787778467480516,
|
14241 |
+
"grad_norm": 0.8309145569801331,
|
14242 |
+
"learning_rate": 0.00019588084355119363,
|
14243 |
+
"loss": 0.5478,
|
14244 |
+
"step": 2021
|
14245 |
+
},
|
14246 |
+
{
|
14247 |
+
"epoch": 0.27891578729567557,
|
14248 |
+
"grad_norm": 0.9033045172691345,
|
14249 |
+
"learning_rate": 0.00019587673341526376,
|
14250 |
+
"loss": 0.4243,
|
14251 |
+
"step": 2022
|
14252 |
+
},
|
14253 |
+
{
|
14254 |
+
"epoch": 0.2790537278432995,
|
14255 |
+
"grad_norm": 0.8193897604942322,
|
14256 |
+
"learning_rate": 0.00019587262127295331,
|
14257 |
+
"loss": 0.5688,
|
14258 |
+
"step": 2023
|
14259 |
+
},
|
14260 |
+
{
|
14261 |
+
"epoch": 0.2791916683909235,
|
14262 |
+
"grad_norm": 0.6730914115905762,
|
14263 |
+
"learning_rate": 0.0001958685071243484,
|
14264 |
+
"loss": 0.5477,
|
14265 |
+
"step": 2024
|
14266 |
+
},
|
14267 |
+
{
|
14268 |
+
"epoch": 0.27932960893854747,
|
14269 |
+
"grad_norm": 0.5275852680206299,
|
14270 |
+
"learning_rate": 0.00019586439096953506,
|
14271 |
+
"loss": 0.2861,
|
14272 |
+
"step": 2025
|
14273 |
+
},
|
14274 |
+
{
|
14275 |
+
"epoch": 0.2794675494861715,
|
14276 |
+
"grad_norm": 0.7503786087036133,
|
14277 |
+
"learning_rate": 0.00019586027280859945,
|
14278 |
+
"loss": 0.7682,
|
14279 |
+
"step": 2026
|
14280 |
+
},
|
14281 |
+
{
|
14282 |
+
"epoch": 0.2796054900337954,
|
14283 |
+
"grad_norm": 0.6576685309410095,
|
14284 |
+
"learning_rate": 0.00019585615264162772,
|
14285 |
+
"loss": 0.6214,
|
14286 |
+
"step": 2027
|
14287 |
+
},
|
14288 |
+
{
|
14289 |
+
"epoch": 0.2797434305814194,
|
14290 |
+
"grad_norm": 0.7833530306816101,
|
14291 |
+
"learning_rate": 0.00019585203046870614,
|
14292 |
+
"loss": 0.9256,
|
14293 |
+
"step": 2028
|
14294 |
+
},
|
14295 |
+
{
|
14296 |
+
"epoch": 0.2798813711290434,
|
14297 |
+
"grad_norm": 0.779478132724762,
|
14298 |
+
"learning_rate": 0.00019584790628992098,
|
14299 |
+
"loss": 0.6512,
|
14300 |
+
"step": 2029
|
14301 |
+
},
|
14302 |
+
{
|
14303 |
+
"epoch": 0.2800193116766674,
|
14304 |
+
"grad_norm": 0.5535669922828674,
|
14305 |
+
"learning_rate": 0.0001958437801053585,
|
14306 |
+
"loss": 0.3869,
|
14307 |
+
"step": 2030
|
14308 |
+
},
|
14309 |
+
{
|
14310 |
+
"epoch": 0.28015725222429133,
|
14311 |
+
"grad_norm": 0.694486141204834,
|
14312 |
+
"learning_rate": 0.00019583965191510505,
|
14313 |
+
"loss": 0.3586,
|
14314 |
+
"step": 2031
|
14315 |
+
},
|
14316 |
+
{
|
14317 |
+
"epoch": 0.2802951927719153,
|
14318 |
+
"grad_norm": 0.7821094989776611,
|
14319 |
+
"learning_rate": 0.00019583552171924704,
|
14320 |
+
"loss": 0.5341,
|
14321 |
+
"step": 2032
|
14322 |
+
},
|
14323 |
+
{
|
14324 |
+
"epoch": 0.2804331333195393,
|
14325 |
+
"grad_norm": 0.570767879486084,
|
14326 |
+
"learning_rate": 0.0001958313895178709,
|
14327 |
+
"loss": 0.4214,
|
14328 |
+
"step": 2033
|
14329 |
+
},
|
14330 |
+
{
|
14331 |
+
"epoch": 0.28057107386716323,
|
14332 |
+
"grad_norm": 0.7766290307044983,
|
14333 |
+
"learning_rate": 0.00019582725531106307,
|
14334 |
+
"loss": 0.6409,
|
14335 |
+
"step": 2034
|
14336 |
+
},
|
14337 |
+
{
|
14338 |
+
"epoch": 0.28070901441478724,
|
14339 |
+
"grad_norm": 0.7544063925743103,
|
14340 |
+
"learning_rate": 0.00019582311909891012,
|
14341 |
+
"loss": 0.5586,
|
14342 |
+
"step": 2035
|
14343 |
+
},
|
14344 |
+
{
|
14345 |
+
"epoch": 0.2808469549624112,
|
14346 |
+
"grad_norm": 0.6841877102851868,
|
14347 |
+
"learning_rate": 0.0001958189808814986,
|
14348 |
+
"loss": 0.4377,
|
14349 |
+
"step": 2036
|
14350 |
+
},
|
14351 |
+
{
|
14352 |
+
"epoch": 0.2809848955100352,
|
14353 |
+
"grad_norm": 0.584334135055542,
|
14354 |
+
"learning_rate": 0.00019581484065891506,
|
14355 |
+
"loss": 0.5649,
|
14356 |
+
"step": 2037
|
14357 |
+
},
|
14358 |
+
{
|
14359 |
+
"epoch": 0.28112283605765914,
|
14360 |
+
"grad_norm": 0.7064344882965088,
|
14361 |
+
"learning_rate": 0.00019581069843124617,
|
14362 |
+
"loss": 0.8847,
|
14363 |
+
"step": 2038
|
14364 |
+
},
|
14365 |
+
{
|
14366 |
+
"epoch": 0.28126077660528315,
|
14367 |
+
"grad_norm": 0.5461025834083557,
|
14368 |
+
"learning_rate": 0.00019580655419857866,
|
14369 |
+
"loss": 0.3344,
|
14370 |
+
"step": 2039
|
14371 |
+
},
|
14372 |
+
{
|
14373 |
+
"epoch": 0.2813987171529071,
|
14374 |
+
"grad_norm": 1.2574125528335571,
|
14375 |
+
"learning_rate": 0.00019580240796099915,
|
14376 |
+
"loss": 0.7018,
|
14377 |
+
"step": 2040
|
14378 |
+
},
|
14379 |
+
{
|
14380 |
+
"epoch": 0.28153665770053105,
|
14381 |
+
"grad_norm": 1.02732253074646,
|
14382 |
+
"learning_rate": 0.00019579825971859452,
|
14383 |
+
"loss": 1.1026,
|
14384 |
+
"step": 2041
|
14385 |
+
},
|
14386 |
+
{
|
14387 |
+
"epoch": 0.28167459824815505,
|
14388 |
+
"grad_norm": 0.4866338074207306,
|
14389 |
+
"learning_rate": 0.00019579410947145146,
|
14390 |
+
"loss": 0.4095,
|
14391 |
+
"step": 2042
|
14392 |
+
},
|
14393 |
+
{
|
14394 |
+
"epoch": 0.281812538795779,
|
14395 |
+
"grad_norm": 0.7297942042350769,
|
14396 |
+
"learning_rate": 0.00019578995721965695,
|
14397 |
+
"loss": 0.7477,
|
14398 |
+
"step": 2043
|
14399 |
+
},
|
14400 |
+
{
|
14401 |
+
"epoch": 0.281950479343403,
|
14402 |
+
"grad_norm": 0.671257734298706,
|
14403 |
+
"learning_rate": 0.0001957858029632978,
|
14404 |
+
"loss": 0.6971,
|
14405 |
+
"step": 2044
|
14406 |
+
},
|
14407 |
+
{
|
14408 |
+
"epoch": 0.28208841989102695,
|
14409 |
+
"grad_norm": 0.6661747097969055,
|
14410 |
+
"learning_rate": 0.00019578164670246094,
|
14411 |
+
"loss": 0.4219,
|
14412 |
+
"step": 2045
|
14413 |
+
},
|
14414 |
+
{
|
14415 |
+
"epoch": 0.28222636043865096,
|
14416 |
+
"grad_norm": 1.152039885520935,
|
14417 |
+
"learning_rate": 0.00019577748843723337,
|
14418 |
+
"loss": 0.9014,
|
14419 |
+
"step": 2046
|
14420 |
+
},
|
14421 |
+
{
|
14422 |
+
"epoch": 0.2823643009862749,
|
14423 |
+
"grad_norm": 0.8204615712165833,
|
14424 |
+
"learning_rate": 0.0001957733281677021,
|
14425 |
+
"loss": 0.7038,
|
14426 |
+
"step": 2047
|
14427 |
+
},
|
14428 |
+
{
|
14429 |
+
"epoch": 0.2825022415338989,
|
14430 |
+
"grad_norm": 0.6705266237258911,
|
14431 |
+
"learning_rate": 0.00019576916589395424,
|
14432 |
+
"loss": 0.7392,
|
14433 |
+
"step": 2048
|
14434 |
+
},
|
14435 |
+
{
|
14436 |
+
"epoch": 0.28264018208152286,
|
14437 |
+
"grad_norm": 1.229459524154663,
|
14438 |
+
"learning_rate": 0.00019576500161607685,
|
14439 |
+
"loss": 1.0651,
|
14440 |
+
"step": 2049
|
14441 |
+
},
|
14442 |
+
{
|
14443 |
+
"epoch": 0.2827781226291468,
|
14444 |
+
"grad_norm": 0.685117244720459,
|
14445 |
+
"learning_rate": 0.00019576083533415703,
|
14446 |
+
"loss": 0.7557,
|
14447 |
+
"step": 2050
|
14448 |
+
},
|
14449 |
+
{
|
14450 |
+
"epoch": 0.2829160631767708,
|
14451 |
+
"grad_norm": 0.8755848407745361,
|
14452 |
+
"learning_rate": 0.00019575666704828206,
|
14453 |
+
"loss": 0.931,
|
14454 |
+
"step": 2051
|
14455 |
+
},
|
14456 |
+
{
|
14457 |
+
"epoch": 0.28305400372439476,
|
14458 |
+
"grad_norm": 0.6004536747932434,
|
14459 |
+
"learning_rate": 0.00019575249675853908,
|
14460 |
+
"loss": 0.5779,
|
14461 |
+
"step": 2052
|
14462 |
+
},
|
14463 |
+
{
|
14464 |
+
"epoch": 0.28319194427201877,
|
14465 |
+
"grad_norm": 0.671427845954895,
|
14466 |
+
"learning_rate": 0.00019574832446501544,
|
14467 |
+
"loss": 0.4515,
|
14468 |
+
"step": 2053
|
14469 |
+
},
|
14470 |
+
{
|
14471 |
+
"epoch": 0.2833298848196427,
|
14472 |
+
"grad_norm": 0.9582410454750061,
|
14473 |
+
"learning_rate": 0.0001957441501677984,
|
14474 |
+
"loss": 0.6176,
|
14475 |
+
"step": 2054
|
14476 |
+
},
|
14477 |
+
{
|
14478 |
+
"epoch": 0.2834678253672667,
|
14479 |
+
"grad_norm": 0.8629324436187744,
|
14480 |
+
"learning_rate": 0.00019573997386697532,
|
14481 |
+
"loss": 0.7077,
|
14482 |
+
"step": 2055
|
14483 |
+
},
|
14484 |
+
{
|
14485 |
+
"epoch": 0.2836057659148907,
|
14486 |
+
"grad_norm": 0.9006950259208679,
|
14487 |
+
"learning_rate": 0.0001957357955626336,
|
14488 |
+
"loss": 0.6964,
|
14489 |
+
"step": 2056
|
14490 |
+
},
|
14491 |
+
{
|
14492 |
+
"epoch": 0.2837437064625147,
|
14493 |
+
"grad_norm": 0.6615795493125916,
|
14494 |
+
"learning_rate": 0.0001957316152548607,
|
14495 |
+
"loss": 0.4452,
|
14496 |
+
"step": 2057
|
14497 |
+
},
|
14498 |
+
{
|
14499 |
+
"epoch": 0.2838816470101386,
|
14500 |
+
"grad_norm": 0.7859619855880737,
|
14501 |
+
"learning_rate": 0.00019572743294374404,
|
14502 |
+
"loss": 1.0109,
|
14503 |
+
"step": 2058
|
14504 |
+
},
|
14505 |
+
{
|
14506 |
+
"epoch": 0.28401958755776263,
|
14507 |
+
"grad_norm": 0.6359809041023254,
|
14508 |
+
"learning_rate": 0.00019572324862937124,
|
14509 |
+
"loss": 0.452,
|
14510 |
+
"step": 2059
|
14511 |
+
},
|
14512 |
+
{
|
14513 |
+
"epoch": 0.2841575281053866,
|
14514 |
+
"grad_norm": 0.780289351940155,
|
14515 |
+
"learning_rate": 0.00019571906231182978,
|
14516 |
+
"loss": 0.8381,
|
14517 |
+
"step": 2060
|
14518 |
+
},
|
14519 |
+
{
|
14520 |
+
"epoch": 0.28429546865301053,
|
14521 |
+
"grad_norm": 0.8848547339439392,
|
14522 |
+
"learning_rate": 0.0001957148739912073,
|
14523 |
+
"loss": 0.6521,
|
14524 |
+
"step": 2061
|
14525 |
+
},
|
14526 |
+
{
|
14527 |
+
"epoch": 0.28443340920063453,
|
14528 |
+
"grad_norm": 0.6815661787986755,
|
14529 |
+
"learning_rate": 0.00019571068366759143,
|
14530 |
+
"loss": 0.5813,
|
14531 |
+
"step": 2062
|
14532 |
+
},
|
14533 |
+
{
|
14534 |
+
"epoch": 0.2845713497482585,
|
14535 |
+
"grad_norm": 0.5312855243682861,
|
14536 |
+
"learning_rate": 0.00019570649134106985,
|
14537 |
+
"loss": 0.3351,
|
14538 |
+
"step": 2063
|
14539 |
+
},
|
14540 |
+
{
|
14541 |
+
"epoch": 0.2847092902958825,
|
14542 |
+
"grad_norm": 0.5981124043464661,
|
14543 |
+
"learning_rate": 0.00019570229701173036,
|
14544 |
+
"loss": 0.4126,
|
14545 |
+
"step": 2064
|
14546 |
+
},
|
14547 |
+
{
|
14548 |
+
"epoch": 0.28484723084350644,
|
14549 |
+
"grad_norm": 0.9804319739341736,
|
14550 |
+
"learning_rate": 0.00019569810067966066,
|
14551 |
+
"loss": 0.8333,
|
14552 |
+
"step": 2065
|
14553 |
+
},
|
14554 |
+
{
|
14555 |
+
"epoch": 0.28498517139113044,
|
14556 |
+
"grad_norm": 1.0361062288284302,
|
14557 |
+
"learning_rate": 0.00019569390234494858,
|
14558 |
+
"loss": 0.6087,
|
14559 |
+
"step": 2066
|
14560 |
+
},
|
14561 |
+
{
|
14562 |
+
"epoch": 0.2851231119387544,
|
14563 |
+
"grad_norm": 0.7839725017547607,
|
14564 |
+
"learning_rate": 0.000195689702007682,
|
14565 |
+
"loss": 0.7404,
|
14566 |
+
"step": 2067
|
14567 |
+
},
|
14568 |
+
{
|
14569 |
+
"epoch": 0.2852610524863784,
|
14570 |
+
"grad_norm": 1.3355668783187866,
|
14571 |
+
"learning_rate": 0.0001956854996679488,
|
14572 |
+
"loss": 0.4984,
|
14573 |
+
"step": 2068
|
14574 |
+
},
|
14575 |
+
{
|
14576 |
+
"epoch": 0.28539899303400235,
|
14577 |
+
"grad_norm": 0.6724937558174133,
|
14578 |
+
"learning_rate": 0.00019568129532583693,
|
14579 |
+
"loss": 0.4341,
|
14580 |
+
"step": 2069
|
14581 |
+
},
|
14582 |
+
{
|
14583 |
+
"epoch": 0.2855369335816263,
|
14584 |
+
"grad_norm": 0.7715407013893127,
|
14585 |
+
"learning_rate": 0.00019567708898143437,
|
14586 |
+
"loss": 0.6913,
|
14587 |
+
"step": 2070
|
14588 |
+
},
|
14589 |
+
{
|
14590 |
+
"epoch": 0.2856748741292503,
|
14591 |
+
"grad_norm": 0.8403461575508118,
|
14592 |
+
"learning_rate": 0.00019567288063482914,
|
14593 |
+
"loss": 0.5184,
|
14594 |
+
"step": 2071
|
14595 |
+
},
|
14596 |
+
{
|
14597 |
+
"epoch": 0.28581281467687425,
|
14598 |
+
"grad_norm": 0.6787713766098022,
|
14599 |
+
"learning_rate": 0.0001956686702861093,
|
14600 |
+
"loss": 0.5928,
|
14601 |
+
"step": 2072
|
14602 |
+
},
|
14603 |
+
{
|
14604 |
+
"epoch": 0.28595075522449825,
|
14605 |
+
"grad_norm": 0.5545241832733154,
|
14606 |
+
"learning_rate": 0.00019566445793536299,
|
14607 |
+
"loss": 0.4176,
|
14608 |
+
"step": 2073
|
14609 |
+
},
|
14610 |
+
{
|
14611 |
+
"epoch": 0.2860886957721222,
|
14612 |
+
"grad_norm": 0.5456835031509399,
|
14613 |
+
"learning_rate": 0.00019566024358267834,
|
14614 |
+
"loss": 0.409,
|
14615 |
+
"step": 2074
|
14616 |
+
},
|
14617 |
+
{
|
14618 |
+
"epoch": 0.2862266363197462,
|
14619 |
+
"grad_norm": 1.8867385387420654,
|
14620 |
+
"learning_rate": 0.00019565602722814354,
|
14621 |
+
"loss": 0.6322,
|
14622 |
+
"step": 2075
|
14623 |
+
},
|
14624 |
+
{
|
14625 |
+
"epoch": 0.28636457686737016,
|
14626 |
+
"grad_norm": 0.7244119644165039,
|
14627 |
+
"learning_rate": 0.0001956518088718468,
|
14628 |
+
"loss": 0.4894,
|
14629 |
+
"step": 2076
|
14630 |
+
},
|
14631 |
+
{
|
14632 |
+
"epoch": 0.28650251741499416,
|
14633 |
+
"grad_norm": 0.7089682817459106,
|
14634 |
+
"learning_rate": 0.00019564758851387649,
|
14635 |
+
"loss": 0.5693,
|
14636 |
+
"step": 2077
|
14637 |
+
},
|
14638 |
+
{
|
14639 |
+
"epoch": 0.2866404579626181,
|
14640 |
+
"grad_norm": 0.6970006823539734,
|
14641 |
+
"learning_rate": 0.0001956433661543208,
|
14642 |
+
"loss": 0.4493,
|
14643 |
+
"step": 2078
|
14644 |
+
},
|
14645 |
+
{
|
14646 |
+
"epoch": 0.28677839851024206,
|
14647 |
+
"grad_norm": 0.7393503785133362,
|
14648 |
+
"learning_rate": 0.00019563914179326818,
|
14649 |
+
"loss": 0.5863,
|
14650 |
+
"step": 2079
|
14651 |
+
},
|
14652 |
+
{
|
14653 |
+
"epoch": 0.28691633905786607,
|
14654 |
+
"grad_norm": 0.6624215841293335,
|
14655 |
+
"learning_rate": 0.00019563491543080698,
|
14656 |
+
"loss": 0.3739,
|
14657 |
+
"step": 2080
|
14658 |
+
},
|
14659 |
+
{
|
14660 |
+
"epoch": 0.28705427960549,
|
14661 |
+
"grad_norm": 0.7205662727355957,
|
14662 |
+
"learning_rate": 0.0001956306870670257,
|
14663 |
+
"loss": 0.5124,
|
14664 |
+
"step": 2081
|
14665 |
+
},
|
14666 |
+
{
|
14667 |
+
"epoch": 0.287192220153114,
|
14668 |
+
"grad_norm": 1.1564881801605225,
|
14669 |
+
"learning_rate": 0.00019562645670201276,
|
14670 |
+
"loss": 1.0517,
|
14671 |
+
"step": 2082
|
14672 |
+
},
|
14673 |
+
{
|
14674 |
+
"epoch": 0.28733016070073797,
|
14675 |
+
"grad_norm": 0.7639877796173096,
|
14676 |
+
"learning_rate": 0.00019562222433585673,
|
14677 |
+
"loss": 0.9036,
|
14678 |
+
"step": 2083
|
14679 |
+
},
|
14680 |
+
{
|
14681 |
+
"epoch": 0.287468101248362,
|
14682 |
+
"grad_norm": 0.6498881578445435,
|
14683 |
+
"learning_rate": 0.00019561798996864618,
|
14684 |
+
"loss": 0.618,
|
14685 |
+
"step": 2084
|
14686 |
+
},
|
14687 |
+
{
|
14688 |
+
"epoch": 0.2876060417959859,
|
14689 |
+
"grad_norm": 0.7746434807777405,
|
14690 |
+
"learning_rate": 0.0001956137536004697,
|
14691 |
+
"loss": 0.5444,
|
14692 |
+
"step": 2085
|
14693 |
+
},
|
14694 |
+
{
|
14695 |
+
"epoch": 0.2877439823436099,
|
14696 |
+
"grad_norm": 1.1528464555740356,
|
14697 |
+
"learning_rate": 0.00019560951523141595,
|
14698 |
+
"loss": 0.4188,
|
14699 |
+
"step": 2086
|
14700 |
+
},
|
14701 |
+
{
|
14702 |
+
"epoch": 0.2878819228912339,
|
14703 |
+
"grad_norm": 0.6776193976402283,
|
14704 |
+
"learning_rate": 0.00019560527486157364,
|
14705 |
+
"loss": 0.4812,
|
14706 |
+
"step": 2087
|
14707 |
+
},
|
14708 |
+
{
|
14709 |
+
"epoch": 0.2880198634388578,
|
14710 |
+
"grad_norm": 1.0938503742218018,
|
14711 |
+
"learning_rate": 0.00019560103249103148,
|
14712 |
+
"loss": 0.8737,
|
14713 |
+
"step": 2088
|
14714 |
+
},
|
14715 |
+
{
|
14716 |
+
"epoch": 0.28815780398648183,
|
14717 |
+
"grad_norm": 0.6782721281051636,
|
14718 |
+
"learning_rate": 0.00019559678811987828,
|
14719 |
+
"loss": 0.5982,
|
14720 |
+
"step": 2089
|
14721 |
+
},
|
14722 |
+
{
|
14723 |
+
"epoch": 0.2882957445341058,
|
14724 |
+
"grad_norm": 0.6858242154121399,
|
14725 |
+
"learning_rate": 0.00019559254174820282,
|
14726 |
+
"loss": 0.6636,
|
14727 |
+
"step": 2090
|
14728 |
+
},
|
14729 |
+
{
|
14730 |
+
"epoch": 0.2884336850817298,
|
14731 |
+
"grad_norm": 0.8259555697441101,
|
14732 |
+
"learning_rate": 0.00019558829337609402,
|
14733 |
+
"loss": 0.4079,
|
14734 |
+
"step": 2091
|
14735 |
+
},
|
14736 |
+
{
|
14737 |
+
"epoch": 0.28857162562935373,
|
14738 |
+
"grad_norm": 0.8771445155143738,
|
14739 |
+
"learning_rate": 0.00019558404300364072,
|
14740 |
+
"loss": 0.6069,
|
14741 |
+
"step": 2092
|
14742 |
+
},
|
14743 |
+
{
|
14744 |
+
"epoch": 0.28870956617697774,
|
14745 |
+
"grad_norm": 0.5591592192649841,
|
14746 |
+
"learning_rate": 0.00019557979063093188,
|
14747 |
+
"loss": 0.373,
|
14748 |
+
"step": 2093
|
14749 |
+
},
|
14750 |
+
{
|
14751 |
+
"epoch": 0.2888475067246017,
|
14752 |
+
"grad_norm": 0.7256616353988647,
|
14753 |
+
"learning_rate": 0.00019557553625805657,
|
14754 |
+
"loss": 0.6074,
|
14755 |
+
"step": 2094
|
14756 |
+
},
|
14757 |
+
{
|
14758 |
+
"epoch": 0.2889854472722257,
|
14759 |
+
"grad_norm": 0.646175742149353,
|
14760 |
+
"learning_rate": 0.00019557127988510372,
|
14761 |
+
"loss": 0.6554,
|
14762 |
+
"step": 2095
|
14763 |
+
},
|
14764 |
+
{
|
14765 |
+
"epoch": 0.28912338781984964,
|
14766 |
+
"grad_norm": 0.5466925501823425,
|
14767 |
+
"learning_rate": 0.00019556702151216242,
|
14768 |
+
"loss": 0.4869,
|
14769 |
+
"step": 2096
|
14770 |
+
},
|
14771 |
+
{
|
14772 |
+
"epoch": 0.2892613283674736,
|
14773 |
+
"grad_norm": 0.8264899253845215,
|
14774 |
+
"learning_rate": 0.00019556276113932183,
|
14775 |
+
"loss": 0.5827,
|
14776 |
+
"step": 2097
|
14777 |
+
},
|
14778 |
+
{
|
14779 |
+
"epoch": 0.2893992689150976,
|
14780 |
+
"grad_norm": 0.7389553189277649,
|
14781 |
+
"learning_rate": 0.00019555849876667103,
|
14782 |
+
"loss": 0.3154,
|
14783 |
+
"step": 2098
|
14784 |
+
},
|
14785 |
+
{
|
14786 |
+
"epoch": 0.28953720946272155,
|
14787 |
+
"grad_norm": 0.6903903484344482,
|
14788 |
+
"learning_rate": 0.0001955542343942993,
|
14789 |
+
"loss": 0.7065,
|
14790 |
+
"step": 2099
|
14791 |
+
},
|
14792 |
+
{
|
14793 |
+
"epoch": 0.28967515001034555,
|
14794 |
+
"grad_norm": 1.1037869453430176,
|
14795 |
+
"learning_rate": 0.00019554996802229583,
|
14796 |
+
"loss": 0.7192,
|
14797 |
+
"step": 2100
|
14798 |
+
},
|
14799 |
+
{
|
14800 |
+
"epoch": 0.2898130905579695,
|
14801 |
+
"grad_norm": 1.640199065208435,
|
14802 |
+
"learning_rate": 0.00019554569965074992,
|
14803 |
+
"loss": 0.553,
|
14804 |
+
"step": 2101
|
14805 |
+
},
|
14806 |
+
{
|
14807 |
+
"epoch": 0.2899510311055935,
|
14808 |
+
"grad_norm": 0.6174784302711487,
|
14809 |
+
"learning_rate": 0.00019554142927975088,
|
14810 |
+
"loss": 0.5931,
|
14811 |
+
"step": 2102
|
14812 |
+
},
|
14813 |
+
{
|
14814 |
+
"epoch": 0.29008897165321745,
|
14815 |
+
"grad_norm": 0.9308575987815857,
|
14816 |
+
"learning_rate": 0.0001955371569093881,
|
14817 |
+
"loss": 0.5784,
|
14818 |
+
"step": 2103
|
14819 |
+
},
|
14820 |
+
{
|
14821 |
+
"epoch": 0.29022691220084146,
|
14822 |
+
"grad_norm": 1.0464619398117065,
|
14823 |
+
"learning_rate": 0.00019553288253975094,
|
14824 |
+
"loss": 0.6852,
|
14825 |
+
"step": 2104
|
14826 |
+
},
|
14827 |
+
{
|
14828 |
+
"epoch": 0.2903648527484654,
|
14829 |
+
"grad_norm": 0.8496968746185303,
|
14830 |
+
"learning_rate": 0.00019552860617092887,
|
14831 |
+
"loss": 0.7255,
|
14832 |
+
"step": 2105
|
14833 |
+
},
|
14834 |
+
{
|
14835 |
+
"epoch": 0.2905027932960894,
|
14836 |
+
"grad_norm": 0.7223439812660217,
|
14837 |
+
"learning_rate": 0.00019552432780301139,
|
14838 |
+
"loss": 0.6047,
|
14839 |
+
"step": 2106
|
14840 |
+
},
|
14841 |
+
{
|
14842 |
+
"epoch": 0.29064073384371336,
|
14843 |
+
"grad_norm": 0.7429842948913574,
|
14844 |
+
"learning_rate": 0.00019552004743608804,
|
14845 |
+
"loss": 0.4424,
|
14846 |
+
"step": 2107
|
14847 |
+
},
|
14848 |
+
{
|
14849 |
+
"epoch": 0.2907786743913373,
|
14850 |
+
"grad_norm": 0.6475143432617188,
|
14851 |
+
"learning_rate": 0.0001955157650702484,
|
14852 |
+
"loss": 0.4936,
|
14853 |
+
"step": 2108
|
14854 |
+
},
|
14855 |
+
{
|
14856 |
+
"epoch": 0.2909166149389613,
|
14857 |
+
"grad_norm": 0.744999885559082,
|
14858 |
+
"learning_rate": 0.00019551148070558205,
|
14859 |
+
"loss": 0.7856,
|
14860 |
+
"step": 2109
|
14861 |
+
},
|
14862 |
+
{
|
14863 |
+
"epoch": 0.29105455548658526,
|
14864 |
+
"grad_norm": 0.6844127178192139,
|
14865 |
+
"learning_rate": 0.00019550719434217865,
|
14866 |
+
"loss": 0.6373,
|
14867 |
+
"step": 2110
|
14868 |
+
},
|
14869 |
+
{
|
14870 |
+
"epoch": 0.29119249603420927,
|
14871 |
+
"grad_norm": 0.7517552375793457,
|
14872 |
+
"learning_rate": 0.00019550290598012793,
|
14873 |
+
"loss": 0.4843,
|
14874 |
+
"step": 2111
|
14875 |
+
},
|
14876 |
+
{
|
14877 |
+
"epoch": 0.2913304365818332,
|
14878 |
+
"grad_norm": 0.7101173996925354,
|
14879 |
+
"learning_rate": 0.00019549861561951959,
|
14880 |
+
"loss": 0.3913,
|
14881 |
+
"step": 2112
|
14882 |
+
},
|
14883 |
+
{
|
14884 |
+
"epoch": 0.2914683771294572,
|
14885 |
+
"grad_norm": 0.721660852432251,
|
14886 |
+
"learning_rate": 0.00019549432326044345,
|
14887 |
+
"loss": 0.302,
|
14888 |
+
"step": 2113
|
14889 |
+
},
|
14890 |
+
{
|
14891 |
+
"epoch": 0.2916063176770812,
|
14892 |
+
"grad_norm": 0.6831248998641968,
|
14893 |
+
"learning_rate": 0.00019549002890298934,
|
14894 |
+
"loss": 0.5832,
|
14895 |
+
"step": 2114
|
14896 |
+
},
|
14897 |
+
{
|
14898 |
+
"epoch": 0.2917442582247052,
|
14899 |
+
"grad_norm": 0.6875988245010376,
|
14900 |
+
"learning_rate": 0.00019548573254724708,
|
14901 |
+
"loss": 0.3342,
|
14902 |
+
"step": 2115
|
14903 |
+
},
|
14904 |
+
{
|
14905 |
+
"epoch": 0.2918821987723291,
|
14906 |
+
"grad_norm": 0.5807436108589172,
|
14907 |
+
"learning_rate": 0.00019548143419330661,
|
14908 |
+
"loss": 0.7861,
|
14909 |
+
"step": 2116
|
14910 |
+
},
|
14911 |
+
{
|
14912 |
+
"epoch": 0.2920201393199531,
|
14913 |
+
"grad_norm": 1.089648962020874,
|
14914 |
+
"learning_rate": 0.0001954771338412579,
|
14915 |
+
"loss": 0.6961,
|
14916 |
+
"step": 2117
|
14917 |
+
},
|
14918 |
+
{
|
14919 |
+
"epoch": 0.2921580798675771,
|
14920 |
+
"grad_norm": 0.7699464559555054,
|
14921 |
+
"learning_rate": 0.00019547283149119092,
|
14922 |
+
"loss": 0.4305,
|
14923 |
+
"step": 2118
|
14924 |
+
},
|
14925 |
+
{
|
14926 |
+
"epoch": 0.29229602041520103,
|
14927 |
+
"grad_norm": 0.7928016781806946,
|
14928 |
+
"learning_rate": 0.0001954685271431957,
|
14929 |
+
"loss": 0.6308,
|
14930 |
+
"step": 2119
|
14931 |
+
},
|
14932 |
+
{
|
14933 |
+
"epoch": 0.29243396096282503,
|
14934 |
+
"grad_norm": 0.9115915298461914,
|
14935 |
+
"learning_rate": 0.0001954642207973623,
|
14936 |
+
"loss": 0.6821,
|
14937 |
+
"step": 2120
|
14938 |
+
},
|
14939 |
+
{
|
14940 |
+
"epoch": 0.292571901510449,
|
14941 |
+
"grad_norm": 0.8448048233985901,
|
14942 |
+
"learning_rate": 0.00019545991245378087,
|
14943 |
+
"loss": 0.7006,
|
14944 |
+
"step": 2121
|
14945 |
+
},
|
14946 |
+
{
|
14947 |
+
"epoch": 0.292709842058073,
|
14948 |
+
"grad_norm": 0.7800453305244446,
|
14949 |
+
"learning_rate": 0.00019545560211254155,
|
14950 |
+
"loss": 0.5068,
|
14951 |
+
"step": 2122
|
14952 |
+
},
|
14953 |
+
{
|
14954 |
+
"epoch": 0.29284778260569694,
|
14955 |
+
"grad_norm": 0.8466667532920837,
|
14956 |
+
"learning_rate": 0.00019545128977373454,
|
14957 |
+
"loss": 0.8495,
|
14958 |
+
"step": 2123
|
14959 |
+
},
|
14960 |
+
{
|
14961 |
+
"epoch": 0.29298572315332094,
|
14962 |
+
"grad_norm": 0.5569940805435181,
|
14963 |
+
"learning_rate": 0.00019544697543745013,
|
14964 |
+
"loss": 0.4394,
|
14965 |
+
"step": 2124
|
14966 |
+
},
|
14967 |
+
{
|
14968 |
+
"epoch": 0.2931236637009449,
|
14969 |
+
"grad_norm": 1.0552653074264526,
|
14970 |
+
"learning_rate": 0.0001954426591037785,
|
14971 |
+
"loss": 0.932,
|
14972 |
+
"step": 2125
|
14973 |
+
},
|
14974 |
+
{
|
14975 |
+
"epoch": 0.29326160424856884,
|
14976 |
+
"grad_norm": 0.8036336898803711,
|
14977 |
+
"learning_rate": 0.00019543834077281007,
|
14978 |
+
"loss": 0.5089,
|
14979 |
+
"step": 2126
|
14980 |
+
},
|
14981 |
+
{
|
14982 |
+
"epoch": 0.29339954479619285,
|
14983 |
+
"grad_norm": 0.9038389325141907,
|
14984 |
+
"learning_rate": 0.00019543402044463521,
|
14985 |
+
"loss": 0.4724,
|
14986 |
+
"step": 2127
|
14987 |
+
},
|
14988 |
+
{
|
14989 |
+
"epoch": 0.2935374853438168,
|
14990 |
+
"grad_norm": 0.5815970301628113,
|
14991 |
+
"learning_rate": 0.00019542969811934426,
|
14992 |
+
"loss": 0.3765,
|
14993 |
+
"step": 2128
|
14994 |
+
},
|
14995 |
+
{
|
14996 |
+
"epoch": 0.2936754258914408,
|
14997 |
+
"grad_norm": 0.8801495432853699,
|
14998 |
+
"learning_rate": 0.00019542537379702772,
|
14999 |
+
"loss": 0.6568,
|
15000 |
+
"step": 2129
|
15001 |
+
},
|
15002 |
+
{
|
15003 |
+
"epoch": 0.29381336643906475,
|
15004 |
+
"grad_norm": 0.9125809073448181,
|
15005 |
+
"learning_rate": 0.0001954210474777761,
|
15006 |
+
"loss": 1.0139,
|
15007 |
+
"step": 2130
|
15008 |
+
},
|
15009 |
+
{
|
15010 |
+
"epoch": 0.29395130698668875,
|
15011 |
+
"grad_norm": 0.6941331624984741,
|
15012 |
+
"learning_rate": 0.00019541671916167987,
|
15013 |
+
"loss": 0.5135,
|
15014 |
+
"step": 2131
|
15015 |
+
},
|
15016 |
+
{
|
15017 |
+
"epoch": 0.2940892475343127,
|
15018 |
+
"grad_norm": 0.721238911151886,
|
15019 |
+
"learning_rate": 0.00019541238884882966,
|
15020 |
+
"loss": 0.3743,
|
15021 |
+
"step": 2132
|
15022 |
+
},
|
15023 |
+
{
|
15024 |
+
"epoch": 0.2942271880819367,
|
15025 |
+
"grad_norm": 1.1131097078323364,
|
15026 |
+
"learning_rate": 0.00019540805653931609,
|
15027 |
+
"loss": 0.8024,
|
15028 |
+
"step": 2133
|
15029 |
+
},
|
15030 |
+
{
|
15031 |
+
"epoch": 0.29436512862956066,
|
15032 |
+
"grad_norm": 0.6035623550415039,
|
15033 |
+
"learning_rate": 0.0001954037222332298,
|
15034 |
+
"loss": 0.4709,
|
15035 |
+
"step": 2134
|
15036 |
+
},
|
15037 |
+
{
|
15038 |
+
"epoch": 0.2945030691771846,
|
15039 |
+
"grad_norm": 0.6855323314666748,
|
15040 |
+
"learning_rate": 0.0001953993859306615,
|
15041 |
+
"loss": 0.4904,
|
15042 |
+
"step": 2135
|
15043 |
+
},
|
15044 |
+
{
|
15045 |
+
"epoch": 0.2946410097248086,
|
15046 |
+
"grad_norm": 0.7864904403686523,
|
15047 |
+
"learning_rate": 0.00019539504763170192,
|
15048 |
+
"loss": 0.73,
|
15049 |
+
"step": 2136
|
15050 |
+
},
|
15051 |
+
{
|
15052 |
+
"epoch": 0.29477895027243256,
|
15053 |
+
"grad_norm": 1.1502920389175415,
|
15054 |
+
"learning_rate": 0.0001953907073364419,
|
15055 |
+
"loss": 1.1485,
|
15056 |
+
"step": 2137
|
15057 |
+
},
|
15058 |
+
{
|
15059 |
+
"epoch": 0.29491689082005657,
|
15060 |
+
"grad_norm": 0.8464686870574951,
|
15061 |
+
"learning_rate": 0.0001953863650449722,
|
15062 |
+
"loss": 0.6272,
|
15063 |
+
"step": 2138
|
15064 |
+
},
|
15065 |
+
{
|
15066 |
+
"epoch": 0.2950548313676805,
|
15067 |
+
"grad_norm": 0.645497739315033,
|
15068 |
+
"learning_rate": 0.00019538202075738373,
|
15069 |
+
"loss": 0.5731,
|
15070 |
+
"step": 2139
|
15071 |
+
},
|
15072 |
+
{
|
15073 |
+
"epoch": 0.2951927719153045,
|
15074 |
+
"grad_norm": 0.7950919270515442,
|
15075 |
+
"learning_rate": 0.00019537767447376736,
|
15076 |
+
"loss": 0.6039,
|
15077 |
+
"step": 2140
|
15078 |
+
},
|
15079 |
+
{
|
15080 |
+
"epoch": 0.29533071246292847,
|
15081 |
+
"grad_norm": 0.7011622190475464,
|
15082 |
+
"learning_rate": 0.0001953733261942141,
|
15083 |
+
"loss": 0.4037,
|
15084 |
+
"step": 2141
|
15085 |
+
},
|
15086 |
+
{
|
15087 |
+
"epoch": 0.2954686530105525,
|
15088 |
+
"grad_norm": 0.7136140465736389,
|
15089 |
+
"learning_rate": 0.0001953689759188149,
|
15090 |
+
"loss": 0.6339,
|
15091 |
+
"step": 2142
|
15092 |
+
},
|
15093 |
+
{
|
15094 |
+
"epoch": 0.2956065935581764,
|
15095 |
+
"grad_norm": 1.0056228637695312,
|
15096 |
+
"learning_rate": 0.0001953646236476608,
|
15097 |
+
"loss": 0.5475,
|
15098 |
+
"step": 2143
|
15099 |
+
},
|
15100 |
+
{
|
15101 |
+
"epoch": 0.2957445341058004,
|
15102 |
+
"grad_norm": 0.8086057901382446,
|
15103 |
+
"learning_rate": 0.00019536026938084296,
|
15104 |
+
"loss": 0.6254,
|
15105 |
+
"step": 2144
|
15106 |
+
},
|
15107 |
+
{
|
15108 |
+
"epoch": 0.2958824746534244,
|
15109 |
+
"grad_norm": 0.8735644817352295,
|
15110 |
+
"learning_rate": 0.00019535591311845235,
|
15111 |
+
"loss": 0.5957,
|
15112 |
+
"step": 2145
|
15113 |
+
},
|
15114 |
+
{
|
15115 |
+
"epoch": 0.2960204152010483,
|
15116 |
+
"grad_norm": 0.6294654607772827,
|
15117 |
+
"learning_rate": 0.00019535155486058027,
|
15118 |
+
"loss": 0.4358,
|
15119 |
+
"step": 2146
|
15120 |
+
},
|
15121 |
+
{
|
15122 |
+
"epoch": 0.29615835574867233,
|
15123 |
+
"grad_norm": 0.6147223114967346,
|
15124 |
+
"learning_rate": 0.00019534719460731785,
|
15125 |
+
"loss": 0.5106,
|
15126 |
+
"step": 2147
|
15127 |
+
},
|
15128 |
+
{
|
15129 |
+
"epoch": 0.2962962962962963,
|
15130 |
+
"grad_norm": 0.6865537166595459,
|
15131 |
+
"learning_rate": 0.00019534283235875637,
|
15132 |
+
"loss": 0.6796,
|
15133 |
+
"step": 2148
|
15134 |
+
},
|
15135 |
+
{
|
15136 |
+
"epoch": 0.2964342368439203,
|
15137 |
+
"grad_norm": 0.8193590641021729,
|
15138 |
+
"learning_rate": 0.0001953384681149871,
|
15139 |
+
"loss": 0.6479,
|
15140 |
+
"step": 2149
|
15141 |
+
},
|
15142 |
+
{
|
15143 |
+
"epoch": 0.29657217739154423,
|
15144 |
+
"grad_norm": 0.8016851544380188,
|
15145 |
+
"learning_rate": 0.00019533410187610138,
|
15146 |
+
"loss": 0.8757,
|
15147 |
+
"step": 2150
|
15148 |
+
},
|
15149 |
+
{
|
15150 |
+
"epoch": 0.29671011793916824,
|
15151 |
+
"grad_norm": 0.8192347288131714,
|
15152 |
+
"learning_rate": 0.00019532973364219054,
|
15153 |
+
"loss": 0.8549,
|
15154 |
+
"step": 2151
|
15155 |
+
},
|
15156 |
+
{
|
15157 |
+
"epoch": 0.2968480584867922,
|
15158 |
+
"grad_norm": 1.2745975255966187,
|
15159 |
+
"learning_rate": 0.000195325363413346,
|
15160 |
+
"loss": 0.8386,
|
15161 |
+
"step": 2152
|
15162 |
+
},
|
15163 |
+
{
|
15164 |
+
"epoch": 0.2969859990344162,
|
15165 |
+
"grad_norm": 0.7096378207206726,
|
15166 |
+
"learning_rate": 0.00019532099118965931,
|
15167 |
+
"loss": 0.5653,
|
15168 |
+
"step": 2153
|
15169 |
+
},
|
15170 |
+
{
|
15171 |
+
"epoch": 0.29712393958204014,
|
15172 |
+
"grad_norm": 0.5258468985557556,
|
15173 |
+
"learning_rate": 0.00019531661697122184,
|
15174 |
+
"loss": 0.3627,
|
15175 |
+
"step": 2154
|
15176 |
+
},
|
15177 |
+
{
|
15178 |
+
"epoch": 0.2972618801296641,
|
15179 |
+
"grad_norm": 0.6194223165512085,
|
15180 |
+
"learning_rate": 0.00019531224075812524,
|
15181 |
+
"loss": 0.2661,
|
15182 |
+
"step": 2155
|
15183 |
+
},
|
15184 |
+
{
|
15185 |
+
"epoch": 0.2973998206772881,
|
15186 |
+
"grad_norm": 0.760379433631897,
|
15187 |
+
"learning_rate": 0.000195307862550461,
|
15188 |
+
"loss": 0.5146,
|
15189 |
+
"step": 2156
|
15190 |
+
},
|
15191 |
+
{
|
15192 |
+
"epoch": 0.29753776122491205,
|
15193 |
+
"grad_norm": 0.6956475973129272,
|
15194 |
+
"learning_rate": 0.00019530348234832076,
|
15195 |
+
"loss": 0.8747,
|
15196 |
+
"step": 2157
|
15197 |
+
},
|
15198 |
+
{
|
15199 |
+
"epoch": 0.29767570177253605,
|
15200 |
+
"grad_norm": 0.6443366408348083,
|
15201 |
+
"learning_rate": 0.0001952991001517962,
|
15202 |
+
"loss": 0.3854,
|
15203 |
+
"step": 2158
|
15204 |
+
},
|
15205 |
+
{
|
15206 |
+
"epoch": 0.29781364232016,
|
15207 |
+
"grad_norm": 0.7563418745994568,
|
15208 |
+
"learning_rate": 0.00019529471596097902,
|
15209 |
+
"loss": 0.4861,
|
15210 |
+
"step": 2159
|
15211 |
+
},
|
15212 |
+
{
|
15213 |
+
"epoch": 0.297951582867784,
|
15214 |
+
"grad_norm": 1.3392752408981323,
|
15215 |
+
"learning_rate": 0.000195290329775961,
|
15216 |
+
"loss": 0.6267,
|
15217 |
+
"step": 2160
|
15218 |
+
},
|
15219 |
+
{
|
15220 |
+
"epoch": 0.29808952341540795,
|
15221 |
+
"grad_norm": 0.5877766013145447,
|
15222 |
+
"learning_rate": 0.00019528594159683385,
|
15223 |
+
"loss": 0.4547,
|
15224 |
+
"step": 2161
|
15225 |
+
},
|
15226 |
+
{
|
15227 |
+
"epoch": 0.29822746396303196,
|
15228 |
+
"grad_norm": 0.9212716817855835,
|
15229 |
+
"learning_rate": 0.00019528155142368948,
|
15230 |
+
"loss": 0.8183,
|
15231 |
+
"step": 2162
|
15232 |
+
},
|
15233 |
+
{
|
15234 |
+
"epoch": 0.2983654045106559,
|
15235 |
+
"grad_norm": 2.37778902053833,
|
15236 |
+
"learning_rate": 0.00019527715925661974,
|
15237 |
+
"loss": 0.9423,
|
15238 |
+
"step": 2163
|
15239 |
+
},
|
15240 |
+
{
|
15241 |
+
"epoch": 0.29850334505827986,
|
15242 |
+
"grad_norm": 0.6656827330589294,
|
15243 |
+
"learning_rate": 0.0001952727650957165,
|
15244 |
+
"loss": 0.4532,
|
15245 |
+
"step": 2164
|
15246 |
+
},
|
15247 |
+
{
|
15248 |
+
"epoch": 0.29864128560590386,
|
15249 |
+
"grad_norm": 0.9072442650794983,
|
15250 |
+
"learning_rate": 0.00019526836894107175,
|
15251 |
+
"loss": 0.8813,
|
15252 |
+
"step": 2165
|
15253 |
+
},
|
15254 |
+
{
|
15255 |
+
"epoch": 0.2987792261535278,
|
15256 |
+
"grad_norm": 1.0257484912872314,
|
15257 |
+
"learning_rate": 0.00019526397079277748,
|
15258 |
+
"loss": 0.8555,
|
15259 |
+
"step": 2166
|
15260 |
+
},
|
15261 |
+
{
|
15262 |
+
"epoch": 0.2989171667011518,
|
15263 |
+
"grad_norm": 0.8982290029525757,
|
15264 |
+
"learning_rate": 0.00019525957065092575,
|
15265 |
+
"loss": 0.5986,
|
15266 |
+
"step": 2167
|
15267 |
+
},
|
15268 |
+
{
|
15269 |
+
"epoch": 0.29905510724877576,
|
15270 |
+
"grad_norm": 0.6626170873641968,
|
15271 |
+
"learning_rate": 0.00019525516851560859,
|
15272 |
+
"loss": 0.6548,
|
15273 |
+
"step": 2168
|
15274 |
+
},
|
15275 |
+
{
|
15276 |
+
"epoch": 0.29919304779639977,
|
15277 |
+
"grad_norm": 1.0469564199447632,
|
15278 |
+
"learning_rate": 0.00019525076438691818,
|
15279 |
+
"loss": 1.2987,
|
15280 |
+
"step": 2169
|
15281 |
+
},
|
15282 |
+
{
|
15283 |
+
"epoch": 0.2993309883440237,
|
15284 |
+
"grad_norm": 0.9261853098869324,
|
15285 |
+
"learning_rate": 0.00019524635826494665,
|
15286 |
+
"loss": 0.7951,
|
15287 |
+
"step": 2170
|
15288 |
+
},
|
15289 |
+
{
|
15290 |
+
"epoch": 0.2994689288916477,
|
15291 |
+
"grad_norm": 0.6855120062828064,
|
15292 |
+
"learning_rate": 0.00019524195014978624,
|
15293 |
+
"loss": 0.5499,
|
15294 |
+
"step": 2171
|
15295 |
+
},
|
15296 |
+
{
|
15297 |
+
"epoch": 0.2996068694392717,
|
15298 |
+
"grad_norm": 0.8790969252586365,
|
15299 |
+
"learning_rate": 0.00019523754004152912,
|
15300 |
+
"loss": 0.6992,
|
15301 |
+
"step": 2172
|
15302 |
+
},
|
15303 |
+
{
|
15304 |
+
"epoch": 0.2997448099868956,
|
15305 |
+
"grad_norm": 0.8488597869873047,
|
15306 |
+
"learning_rate": 0.00019523312794026768,
|
15307 |
+
"loss": 0.6438,
|
15308 |
+
"step": 2173
|
15309 |
+
},
|
15310 |
+
{
|
15311 |
+
"epoch": 0.2998827505345196,
|
15312 |
+
"grad_norm": 1.2970049381256104,
|
15313 |
+
"learning_rate": 0.00019522871384609417,
|
15314 |
+
"loss": 0.7046,
|
15315 |
+
"step": 2174
|
15316 |
+
},
|
15317 |
+
{
|
15318 |
+
"epoch": 0.3000206910821436,
|
15319 |
+
"grad_norm": 1.1517971754074097,
|
15320 |
+
"learning_rate": 0.000195224297759101,
|
15321 |
+
"loss": 0.8484,
|
15322 |
+
"step": 2175
|
15323 |
+
},
|
15324 |
+
{
|
15325 |
+
"epoch": 0.3001586316297676,
|
15326 |
+
"grad_norm": 1.1928848028182983,
|
15327 |
+
"learning_rate": 0.00019521987967938058,
|
15328 |
+
"loss": 0.7795,
|
15329 |
+
"step": 2176
|
15330 |
+
},
|
15331 |
+
{
|
15332 |
+
"epoch": 0.30029657217739153,
|
15333 |
+
"grad_norm": 0.7839152216911316,
|
15334 |
+
"learning_rate": 0.00019521545960702534,
|
15335 |
+
"loss": 0.337,
|
15336 |
+
"step": 2177
|
15337 |
+
},
|
15338 |
+
{
|
15339 |
+
"epoch": 0.30043451272501553,
|
15340 |
+
"grad_norm": 0.6438336372375488,
|
15341 |
+
"learning_rate": 0.0001952110375421278,
|
15342 |
+
"loss": 0.5161,
|
15343 |
+
"step": 2178
|
15344 |
+
},
|
15345 |
+
{
|
15346 |
+
"epoch": 0.3005724532726395,
|
15347 |
+
"grad_norm": 0.6859175562858582,
|
15348 |
+
"learning_rate": 0.00019520661348478054,
|
15349 |
+
"loss": 0.4835,
|
15350 |
+
"step": 2179
|
15351 |
+
},
|
15352 |
+
{
|
15353 |
+
"epoch": 0.3007103938202635,
|
15354 |
+
"grad_norm": 0.6152809858322144,
|
15355 |
+
"learning_rate": 0.00019520218743507606,
|
15356 |
+
"loss": 0.394,
|
15357 |
+
"step": 2180
|
15358 |
+
},
|
15359 |
+
{
|
15360 |
+
"epoch": 0.30084833436788744,
|
15361 |
+
"grad_norm": 0.6782438158988953,
|
15362 |
+
"learning_rate": 0.00019519775939310705,
|
15363 |
+
"loss": 0.6891,
|
15364 |
+
"step": 2181
|
15365 |
+
},
|
15366 |
+
{
|
15367 |
+
"epoch": 0.3009862749155114,
|
15368 |
+
"grad_norm": 0.9672862887382507,
|
15369 |
+
"learning_rate": 0.00019519332935896613,
|
15370 |
+
"loss": 0.6395,
|
15371 |
+
"step": 2182
|
15372 |
+
},
|
15373 |
+
{
|
15374 |
+
"epoch": 0.3011242154631354,
|
15375 |
+
"grad_norm": 0.6512202620506287,
|
15376 |
+
"learning_rate": 0.000195188897332746,
|
15377 |
+
"loss": 0.4909,
|
15378 |
+
"step": 2183
|
15379 |
+
},
|
15380 |
+
{
|
15381 |
+
"epoch": 0.30126215601075934,
|
15382 |
+
"grad_norm": 1.2240195274353027,
|
15383 |
+
"learning_rate": 0.00019518446331453948,
|
15384 |
+
"loss": 0.9607,
|
15385 |
+
"step": 2184
|
15386 |
+
},
|
15387 |
+
{
|
15388 |
+
"epoch": 0.30140009655838335,
|
15389 |
+
"grad_norm": 0.6230162978172302,
|
15390 |
+
"learning_rate": 0.00019518002730443927,
|
15391 |
+
"loss": 0.4991,
|
15392 |
+
"step": 2185
|
15393 |
+
},
|
15394 |
+
{
|
15395 |
+
"epoch": 0.3015380371060073,
|
15396 |
+
"grad_norm": 1.0958621501922607,
|
15397 |
+
"learning_rate": 0.00019517558930253826,
|
15398 |
+
"loss": 0.6996,
|
15399 |
+
"step": 2186
|
15400 |
+
},
|
15401 |
+
{
|
15402 |
+
"epoch": 0.3016759776536313,
|
15403 |
+
"grad_norm": 0.8363164067268372,
|
15404 |
+
"learning_rate": 0.00019517114930892927,
|
15405 |
+
"loss": 0.8149,
|
15406 |
+
"step": 2187
|
15407 |
+
},
|
15408 |
+
{
|
15409 |
+
"epoch": 0.30181391820125525,
|
15410 |
+
"grad_norm": 0.7095656394958496,
|
15411 |
+
"learning_rate": 0.00019516670732370528,
|
15412 |
+
"loss": 0.4541,
|
15413 |
+
"step": 2188
|
15414 |
+
},
|
15415 |
+
{
|
15416 |
+
"epoch": 0.30195185874887925,
|
15417 |
+
"grad_norm": 0.9975584149360657,
|
15418 |
+
"learning_rate": 0.0001951622633469592,
|
15419 |
+
"loss": 0.7558,
|
15420 |
+
"step": 2189
|
15421 |
+
},
|
15422 |
+
{
|
15423 |
+
"epoch": 0.3020897992965032,
|
15424 |
+
"grad_norm": 0.5672247409820557,
|
15425 |
+
"learning_rate": 0.00019515781737878402,
|
15426 |
+
"loss": 0.2655,
|
15427 |
+
"step": 2190
|
15428 |
+
},
|
15429 |
+
{
|
15430 |
+
"epoch": 0.3022277398441272,
|
15431 |
+
"grad_norm": 0.6647024154663086,
|
15432 |
+
"learning_rate": 0.00019515336941927283,
|
15433 |
+
"loss": 0.5032,
|
15434 |
+
"step": 2191
|
15435 |
+
},
|
15436 |
+
{
|
15437 |
+
"epoch": 0.30236568039175116,
|
15438 |
+
"grad_norm": 0.7022714614868164,
|
15439 |
+
"learning_rate": 0.00019514891946851868,
|
15440 |
+
"loss": 0.5562,
|
15441 |
+
"step": 2192
|
15442 |
+
},
|
15443 |
+
{
|
15444 |
+
"epoch": 0.3025036209393751,
|
15445 |
+
"grad_norm": 0.6081823706626892,
|
15446 |
+
"learning_rate": 0.00019514446752661466,
|
15447 |
+
"loss": 0.4645,
|
15448 |
+
"step": 2193
|
15449 |
+
},
|
15450 |
+
{
|
15451 |
+
"epoch": 0.3026415614869991,
|
15452 |
+
"grad_norm": 0.6334623098373413,
|
15453 |
+
"learning_rate": 0.00019514001359365399,
|
15454 |
+
"loss": 0.6747,
|
15455 |
+
"step": 2194
|
15456 |
+
},
|
15457 |
+
{
|
15458 |
+
"epoch": 0.30277950203462306,
|
15459 |
+
"grad_norm": 0.6264985799789429,
|
15460 |
+
"learning_rate": 0.00019513555766972987,
|
15461 |
+
"loss": 0.5149,
|
15462 |
+
"step": 2195
|
15463 |
+
},
|
15464 |
+
{
|
15465 |
+
"epoch": 0.30291744258224707,
|
15466 |
+
"grad_norm": 0.784883975982666,
|
15467 |
+
"learning_rate": 0.0001951310997549355,
|
15468 |
+
"loss": 0.7089,
|
15469 |
+
"step": 2196
|
15470 |
+
},
|
15471 |
+
{
|
15472 |
+
"epoch": 0.303055383129871,
|
15473 |
+
"grad_norm": 0.5658442974090576,
|
15474 |
+
"learning_rate": 0.00019512663984936422,
|
15475 |
+
"loss": 0.4461,
|
15476 |
+
"step": 2197
|
15477 |
+
},
|
15478 |
+
{
|
15479 |
+
"epoch": 0.303193323677495,
|
15480 |
+
"grad_norm": 0.6339519023895264,
|
15481 |
+
"learning_rate": 0.00019512217795310933,
|
15482 |
+
"loss": 0.5541,
|
15483 |
+
"step": 2198
|
15484 |
+
},
|
15485 |
+
{
|
15486 |
+
"epoch": 0.30333126422511897,
|
15487 |
+
"grad_norm": 0.8487290740013123,
|
15488 |
+
"learning_rate": 0.0001951177140662642,
|
15489 |
+
"loss": 0.7045,
|
15490 |
+
"step": 2199
|
15491 |
+
},
|
15492 |
+
{
|
15493 |
+
"epoch": 0.303469204772743,
|
15494 |
+
"grad_norm": 0.5900312662124634,
|
15495 |
+
"learning_rate": 0.00019511324818892228,
|
15496 |
+
"loss": 0.42,
|
15497 |
+
"step": 2200
|
15498 |
+
},
|
15499 |
+
{
|
15500 |
+
"epoch": 0.303469204772743,
|
15501 |
+
"eval_loss": 0.661845326423645,
|
15502 |
+
"eval_runtime": 23.5093,
|
15503 |
+
"eval_samples_per_second": 2.51,
|
15504 |
+
"eval_steps_per_second": 2.51,
|
15505 |
+
"step": 2200
|
15506 |   }
15507 |   ],
15508 |   "logging_steps": 1,

15517 |   "early_stopping_threshold": 0.0
15518 |   },
15519 |   "attributes": {
15520 | + "early_stopping_patience_counter": 2
15521 |   }
15522 |   },
15523 |   "TrainerControl": {

15531 |   "attributes": {}
15532 |   }
15533 |   },
15534 | + "total_flos": 3.483236466111283e+17,
15535 |   "train_batch_size": 1,
15536 |   "trial_name": null,
15537 |   "trial_params": null
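The trainer_state.json above follows the usual transformers Trainer state layout: bookkeeping fields such as "best_metric", "best_model_checkpoint" and "global_step", plus a "log_history" list holding the per-step entries (epoch, grad_norm, learning_rate, loss, step) and the periodic eval records added in this commit. A minimal sketch for inspecting the state after downloading the checkpoint, assuming it sits in a local last-checkpoint/ directory (the path is illustrative):

```python
import json
from pathlib import Path

# Load the trainer state saved alongside the checkpoint.
state = json.loads(Path("last-checkpoint/trainer_state.json").read_text())

print(state["global_step"])   # 2200 for this commit
print(state["best_metric"])   # 0.635880708694458, recorded at checkpoint-1800

# Pull the evaluation records out of the per-step training logs.
evals = [entry for entry in state["log_history"] if "eval_loss" in entry]
for entry in evals[-3:]:
    print(entry["step"], entry["eval_loss"])
```

The same "global_step" value is what Trainer picks up when resuming with trainer.train(resume_from_checkpoint=...), which is how a checkpoint commit like this one is normally consumed.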